Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -12,7 +12,7 @@ import cv2
 import base64
 import logging
 import time
-from urllib.parse import quote #
+from urllib.parse import quote # For URL encoding
 
 import gradio as gr
 import spaces
@@ -37,8 +37,45 @@ logging.basicConfig(
     format='%(asctime)s - %(levelname)s - %(message)s'
 )
 
+# =============================================================================
+# Load MBTI setting from mbti.json and map to full description.
+# =============================================================================
+try:
+    with open("mbti.json", "r", encoding="utf-8") as f:
+        # Expecting a single MBTI key string, e.g., "entj"
+        mbti_key = json.load(f)
+        mbti_key = mbti_key.strip().lower() if isinstance(mbti_key, str) else "intp"
+except Exception as e:
+    logging.error(f"Error reading mbti.json: {e}")
+    mbti_key = "intp"  # default
+
+mbti_mapping = {
+    "intj": "INTJ (The Architect) - Future-oriented with innovative strategies and thorough analysis. Example: [Dana Scully](https://en.wikipedia.org/wiki/Dana_Scully)",
+    "intp": "INTP (The Thinker) - Excels at theoretical analysis and creative problem solving. Example: [Velma Dinkley](https://en.wikipedia.org/wiki/Velma_Dinkley)",
+    "entj": "ENTJ (The Commander) - Strong leadership and clear goals with efficient strategic planning. Example: [Miranda Priestly](https://en.wikipedia.org/wiki/Miranda_Priestly)",
+    "entp": "ENTP (The Debater) - Innovative, challenge-seeking, and enjoys exploring new possibilities. Example: [Harley Quinn](https://en.wikipedia.org/wiki/Harley_Quinn)",
+    "infj": "INFJ (The Advocate) - Insightful, idealistic and morally driven. Example: [Wonder Woman](https://en.wikipedia.org/wiki/Wonder_Woman)",
+    "infp": "INFP (The Mediator) - Passionate and idealistic, pursuing core values with creativity. Example: [Amélie Poulain](https://en.wikipedia.org/wiki/Am%C3%A9lie)",
+    "enfj": "ENFJ (The Protagonist) - Empathetic and dedicated to social harmony. Example: [Mulan](https://en.wikipedia.org/wiki/Mulan_(Disney))",
+    "enfp": "ENFP (The Campaigner) - Inspiring and constantly sharing creative ideas. Example: [Elle Woods](https://en.wikipedia.org/wiki/Legally_Blonde)",
+    "istj": "ISTJ (The Logistician) - Systematic, dependable, and values tradition and rules. Example: [Clarice Starling](https://en.wikipedia.org/wiki/Clarice_Starling)",
+    "isfj": "ISFJ (The Defender) - Compassionate and attentive to others’ needs. Example: [Molly Weasley](https://en.wikipedia.org/wiki/Molly_Weasley)",
+    "estj": "ESTJ (The Executive) - Organized, practical, and demonstrates clear execution skills. Example: [Monica Geller](https://en.wikipedia.org/wiki/Monica_Geller)",
+    "esfj": "ESFJ (The Consul) - Outgoing, cooperative, and an effective communicator. Example: [Rachel Green](https://en.wikipedia.org/wiki/Rachel_Green)",
+    "istp": "ISTP (The Virtuoso) - Analytical and resourceful, solving problems with quick thinking. Example: [Black Widow (Natasha Romanoff)](https://en.wikipedia.org/wiki/Black_Widow_(Marvel_Comics))",
+    "isfp": "ISFP (The Adventurer) - Creative, sensitive, and appreciates artistic expression. Example: [Arwen](https://en.wikipedia.org/wiki/Arwen)",
+    "estp": "ESTP (The Entrepreneur) - Bold and action-oriented, thriving on challenges. Example: [Lara Croft](https://en.wikipedia.org/wiki/Lara_Croft)",
+    "esfp": "ESFP (The Entertainer) - Energetic, spontaneous, and radiates positive energy. Example: [Phoebe Buffay](https://en.wikipedia.org/wiki/Phoebe_Buffay)"
+}
+
+# Use the mapped MBTI description, defaulting to INTP if not found
+fixed_mbti = mbti_mapping.get(mbti_key, mbti_mapping["intp"])
+
+# =============================================================================
+# Test API Connection function
+# =============================================================================
 def test_api_connection() -> str:
-    """Test API server connection"""
+    """Test API server connection."""
     try:
         client = Client(API_URL)
         return "API connection successful: Operating normally"
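Note: the new startup block reads a single JSON string from mbti.json and lowercases it before the dictionary lookup. A minimal sketch of the expected file and the resulting lookup follows; the file content shown here is illustrative, and the block assumes json is imported elsewhere in app.py.

import json

# Hypothetical mbti.json content: a bare JSON string such as "entj"
with open("mbti.json", "w", encoding="utf-8") as f:
    json.dump("entj", f)

with open("mbti.json", "r", encoding="utf-8") as f:
    key = json.load(f)                                     # -> "entj"
key = key.strip().lower() if isinstance(key, str) else "intp"
# mbti_mapping.get(key, mbti_mapping["intp"]) then resolves to the full ENTJ description.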
@@ -46,13 +83,15 @@ def test_api_connection() -> str:
         logging.error(f"API connection test failed: {e}")
         return f"API connection failed: {e}"
 
+# =============================================================================
+# Image Generation function
+# =============================================================================
 def generate_image(prompt: str, width: float, height: float, guidance: float, inference_steps: float, seed: float):
-    """Image generation function (flexible return
+    """Image generation function (flexible return type)."""
     if not prompt:
         return None, "Error: A prompt is required."
     try:
         logging.info(f"Calling image generation API with prompt: {prompt}")
-
         client = Client(API_URL)
         result = client.predict(
             prompt=prompt,
@@ -67,18 +106,13 @@ def generate_image(prompt: str, width: float, height: float, guidance: float, inference_steps: float, seed: float):
             resize_img=True,
             api_name="/generate_image"
         )
-
         logging.info(f"Image generation result: {type(result)}, length: {len(result) if isinstance(result, (list, tuple)) else 'unknown'}")
-
-        # Handle cases where the result is a tuple or list
         if isinstance(result, (list, tuple)) and len(result) > 0:
-            image_data = result[0]
+            image_data = result[0]
            seed_info = result[1] if len(result) > 1 else "Unknown seed"
            return image_data, seed_info
         else:
-            # When a single value is returned
             return result, "Unknown seed"
-
     except Exception as e:
         logging.error(f"Image generation failed: {str(e)}")
         return None, f"Error: {str(e)}"
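The call pattern inside generate_image() can be exercised on its own with gradio_client. A minimal sketch, assuming API_URL points at the backing Space; only the prompt and api_name arguments visible in this hunk are shown, whereas the real function also passes width, height, guidance, inference steps, seed, and resize_img.

from gradio_client import Client

API_URL = "https://example-space.hf.space"   # placeholder; the real value is defined elsewhere in app.py

client = Client(API_URL)
result = client.predict(
    prompt="a lighthouse at dusk",
    api_name="/generate_image",              # endpoint name taken from the diff
)
# As in generate_image(), the result may be a (image, seed) tuple or a single value.
image_data = result[0] if isinstance(result, (list, tuple)) else result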
@@ -88,16 +122,11 @@ def fix_base64_padding(data):
     """Fix the padding of a Base64 string."""
     if isinstance(data, bytes):
         data = data.decode('utf-8')
-
-    # Remove the prefix if present
     if "base64," in data:
         data = data.split("base64,", 1)[1]
-
-    # Add padding characters (to make the length a multiple of 4)
     missing_padding = len(data) % 4
     if missing_padding:
         data += '=' * (4 - missing_padding)
-
     return data
 
 # =============================================================================
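A quick illustration of what fix_base64_padding() handles, assuming the function above is in scope; the input string is purely illustrative.

import base64

raw = base64.b64encode(b"hello").decode()                    # 'aGVsbG8='
truncated = "data:image/webp;base64," + raw.rstrip("=")      # prefix present, padding stripped

fixed = fix_base64_padding(truncated)
assert base64.b64decode(fixed) == b"hello"                   # prefix removed, '=' padding restored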
@@ -110,18 +139,18 @@ def clear_cuda_cache():
     gc.collect()
 
 # =============================================================================
-# SerpHouse
+# SerpHouse API functions
 # =============================================================================
 SERPHOUSE_API_KEY = os.getenv("SERPHOUSE_API_KEY", "")
 
 def extract_keywords(text: str, top_k: int = 5) -> str:
-    """
+    """Extract simple keywords: only retain English, Korean, numbers, and spaces."""
     text = re.sub(r"[^a-zA-Z0-9가-힣\s]", "", text)
     tokens = text.split()
     return " ".join(tokens[:top_k])
 
 def do_web_search(query: str) -> str:
-    """Call the SerpHouse LIVE API to return Markdown
+    """Call the SerpHouse LIVE API to return Markdown-formatted search results."""
     try:
         url = "https://api.serphouse.com/serp/live"
         params = {
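extract_keywords() is a simple whitespace tokenizer applied after stripping everything except letters, digits, Korean characters, and spaces; for example:

print(extract_keywords("What's the weather in Seoul (서울) today?!", top_k=5))
# -> "Whats the weather in Seoul"   (punctuation dropped, first five tokens kept)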
@@ -170,7 +199,7 @@ Below are the search results. Use this information to answer the query:
 2. In your answer, explicitly cite the source of any used information (e.g., "[Source Title](link)").
 3. Include the actual source links in your response.
 4. Synthesize information from multiple sources.
-5. At the end
+5. At the end, add a "References:" section listing the main source links.
 """
         return instructions + "\n".join(summary_lines)
     except Exception as e:
@@ -440,7 +469,7 @@ def run(
     use_web_search: bool = False,
     web_search_query: str = "",
     age_group: str = "20s",
-    mbti_personality: str = "
+    mbti_personality: str = "", # Will be supplied as fixed_mbti
     sexual_openness: int = 2,
     image_gen: bool = False # "Image Gen" checkbox status
 ) -> Iterator[str]:
@@ -449,7 +478,7 @@ def run(
         return
     temp_files = []
     try:
-        # Append persona information
+        # Append persona information (including fixed MBTI info)
         persona = (
             f"{system_prompt.strip()}\n\n"
             f"Gender: Female\n"
@@ -523,32 +552,30 @@
     clear_cuda_cache()
 
 # =============================================================================
-# Modified model run function -
+# Modified model run function - fixed MBTI from file is used
 # =============================================================================
 def modified_run(message, history, system_prompt, max_new_tokens, use_web_search, web_search_query,
-
-    #
+                 age_group, sexual_openness, image_gen):
+    # Use the fixed MBTI value (read from mbti.json)
+    fixed_mbti_value = fixed_mbti # Already loaded earlier
+    # Initialize gallery component and hide it initially
     output_so_far = ""
     gallery_update = gr.Gallery(visible=False, value=[])
     yield output_so_far, gallery_update
 
-    #
-    text_generator = run(message, history, system_prompt, max_new_tokens, use_web_search,
-
-
+    # Call the main run() function with the fixed MBTI value
+    text_generator = run(message, history, system_prompt, max_new_tokens, use_web_search,
+                         web_search_query, age_group, fixed_mbti_value, sexual_openness, image_gen)
     for text_chunk in text_generator:
         output_so_far = text_chunk
         yield output_so_far, gallery_update
 
-    #
+    # Image generation handling (unchanged)
     if image_gen and message["text"].strip():
         try:
             width, height = 512, 512
             guidance, steps, seed = 7.5, 30, 42
-
             logger.info(f"Calling image generation for gallery with prompt: {message['text']}")
-
-            # Call the API to generate an image
             image_result, seed_info = generate_image(
                 prompt=message["text"].strip(),
                 width=width,
@@ -557,73 +584,52 @@ def modified_run(message, history, system_prompt, max_new_tokens, use_web_search, web_search_query,
                 inference_steps=steps,
                 seed=seed
             )
-
             if image_result:
-                # Process image data directly if it is a base64 string
                 if isinstance(image_result, str) and (
                     image_result.startswith('data:') or
                     (len(image_result) > 100 and '/' not in image_result)
                 ):
                     try:
-                        # Remove the data:image prefix if present
                         if image_result.startswith('data:'):
                             content_type, b64data = image_result.split(';base64,')
                         else:
                             b64data = image_result
-                            content_type = "image/webp"
-
-                        # Decode base64
+                            content_type = "image/webp"
                         image_bytes = base64.b64decode(b64data)
-
-                        # Save to a temporary file
                         with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
                             temp_file.write(image_bytes)
                             temp_path = temp_file.name
-
-                        # Update gallery to show the image
                         gallery_update = gr.Gallery(visible=True, value=[temp_path])
                         yield output_so_far + "\n\n*Image generated and displayed in the gallery below.*", gallery_update
-
                     except Exception as e:
                         logger.error(f"Error processing Base64 image: {e}")
                         yield output_so_far + f"\n\n(Error processing image: {e})", gallery_update
-
-                # If the result is a file path
                 elif isinstance(image_result, str) and os.path.exists(image_result):
                     gallery_update = gr.Gallery(visible=True, value=[image_result])
                     yield output_so_far + "\n\n*Image generated and displayed in the gallery below.*", gallery_update
-
-                # If the path is from /tmp (only on the API server)
                 elif isinstance(image_result, str) and '/tmp/' in image_result:
                     try:
                         client = Client(API_URL)
                         result = client.predict(
                             prompt=message["text"].strip(),
-                            api_name="/generate_base64_image"
+                            api_name="/generate_base64_image"
                         )
-
                         if isinstance(result, str) and (result.startswith('data:') or len(result) > 100):
                             if result.startswith('data:'):
                                 content_type, b64data = result.split(';base64,')
                             else:
                                 b64data = result
-
                             image_bytes = base64.b64decode(b64data)
-
                             with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
                                 temp_file.write(image_bytes)
                                 temp_path = temp_file.name
-
                             gallery_update = gr.Gallery(visible=True, value=[temp_path])
                             yield output_so_far + "\n\n*Image generated and displayed in the gallery below.*", gallery_update
                         else:
                             yield output_so_far + "\n\n(Image generation failed: Invalid format)", gallery_update
-
                     except Exception as e:
                         logger.error(f"Error calling alternative API: {e}")
                         yield output_so_far + f"\n\n(Image generation failed: {e})", gallery_update
-
-                # If the result is a URL
                 elif isinstance(image_result, str) and (
                     image_result.startswith('http://') or
                     image_result.startswith('https://')
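The base64 branch above can be reduced to a small standalone helper. This is a sketch of the same flow shown in the hunk (data-URI split, decode, write to a temporary .webp); the helper name is illustrative.

import base64
import tempfile

def b64_to_temp_webp(image_result: str) -> str:
    """Decode a data URI or bare base64 string and return the path of a temp .webp file."""
    if image_result.startswith("data:"):
        _content_type, b64data = image_result.split(";base64,")
    else:
        b64data = image_result
    image_bytes = base64.b64decode(b64data)
    with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
        temp_file.write(image_bytes)
        return temp_file.name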
@@ -631,43 +637,34 @@ def modified_run(message, history, system_prompt, max_new_tokens, use_web_search, web_search_query,
                     try:
                         response = requests.get(image_result, timeout=10)
                         response.raise_for_status()
-
                         with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
                             temp_file.write(response.content)
                             temp_path = temp_file.name
-
                         gallery_update = gr.Gallery(visible=True, value=[temp_path])
                         yield output_so_far + "\n\n*Image generated and displayed in the gallery below.*", gallery_update
-
                     except Exception as e:
                         logger.error(f"URL image download error: {e}")
                         yield output_so_far + f"\n\n(Error downloading image: {e})", gallery_update
-
-                # If the image result is an image object (e.g., PIL Image)
                 elif hasattr(image_result, 'save'):
                     try:
                         with tempfile.NamedTemporaryFile(delete=False, suffix=".webp") as temp_file:
                             image_result.save(temp_file.name)
                             temp_path = temp_file.name
-
                         gallery_update = gr.Gallery(visible=True, value=[temp_path])
                         yield output_so_far + "\n\n*Image generated and displayed in the gallery below.*", gallery_update
-
                     except Exception as e:
                         logger.error(f"Error saving image object: {e}")
                         yield output_so_far + f"\n\n(Error saving image object: {e})", gallery_update
-
                 else:
                     yield output_so_far + f"\n\n(Unsupported image format: {type(image_result)})", gallery_update
             else:
                 yield output_so_far + f"\n\n(Image generation failed: {seed_info})", gallery_update
-
         except Exception as e:
             logger.error(f"Error during gallery image generation: {e}")
             yield output_so_far + f"\n\n(Image generation error: {e})", gallery_update
 
 # =============================================================================
-# Examples: 12 image/video examples +
+# Examples: 12 image/video examples + additional examples
 # =============================================================================
 examples = [
     [
@@ -742,7 +739,6 @@ examples = [
             "files": ["assets/additional-examples/3.png"],
         }
     ],
-
     [
         {
             "text": "Compare the two images and describe their similarities and differences.",
@@ -754,14 +750,12 @@ examples = [
             "text": "A cute Persian cat is smiling while holding a cover with 'I LOVE YOU' written on it.",
         }
     ],
-
 ]
 
 # =============================================================================
 # Gradio UI (Blocks) configuration
 # =============================================================================
 
-# 1. Gradio Blocks UI modification - Add gallery component for displaying generated images
 css = """
 .gradio-container {
     background: rgba(255, 255, 255, 0.7);
@@ -772,17 +766,17 @@ css = """
 }
 """
 title_html = """
-<h1 align="center" style="margin-bottom: 0.2em; font-size: 1.6em;"> π HeartSync
+<h1 align="center" style="margin-bottom: 0.2em; font-size: 1.6em;"> π HeartSync Korea π </h1>
 <p align="center" style="font-size:1.1em; color:#555;">
 A lightweight and powerful AI service offering ChatGPT-4o-level multimodal, web search, and image generation capabilities for local installation. <br>
 β FLUX Image Generation β Inference β Censorship Bypass β Multimodal & VLM β Real-time Web Search β RAG <br>
 </p>
 """
 
-with gr.Blocks(css=css, title="
+with gr.Blocks(css=css, title="AgenticAI-Kv1") as demo:
     gr.Markdown(title_html)
 
-    # Gallery component to store generated images
+    # Gallery component to store generated images
     generated_images = gr.Gallery(
         label="Generated Images",
         show_label=True,
@@ -813,31 +807,7 @@ with gr.Blocks(css=css, title="HeartSync - World") as demo:
             value="20s",
             interactive=True
         )
-        # MBTI
-        mbti_choices = [
-            "INTJ (The Architect) - Future-oriented with innovative strategies and thorough analysis. Example: [Dana Scully](https://en.wikipedia.org/wiki/Dana_Scully)",
-            "INTP (The Thinker) - Excels at theoretical analysis and creative problem solving. Example: [Velma Dinkley](https://en.wikipedia.org/wiki/Velma_Dinkley)",
-            "ENTJ (The Commander) - Strong leadership and clear goals with efficient strategic planning. Example: [Miranda Priestly](https://en.wikipedia.org/wiki/Miranda_Priestly)",
-            "ENTP (The Debater) - Innovative, challenge-seeking, and enjoys exploring new possibilities. Example: [Harley Quinn](https://en.wikipedia.org/wiki/Harley_Quinn)",
-            "INFJ (The Advocate) - Insightful, idealistic and morally driven. Example: [Wonder Woman](https://en.wikipedia.org/wiki/Wonder_Woman)",
-            "INFP (The Mediator) - Passionate and idealistic, pursuing core values with creativity. Example: [Amélie Poulain](https://en.wikipedia.org/wiki/Am%C3%A9lie)",
-            "ENFJ (The Protagonist) - Empathetic and dedicated to social harmony. Example: [Mulan](https://en.wikipedia.org/wiki/Mulan_(Disney))",
-            "ENFP (The Campaigner) - Inspiring and constantly sharing creative ideas. Example: [Elle Woods](https://en.wikipedia.org/wiki/Legally_Blonde)",
-            "ISTJ (The Logistician) - Systematic, dependable, and values tradition and rules. Example: [Clarice Starling](https://en.wikipedia.org/wiki/Clarice_Starling)",
-            "ISFJ (The Defender) - Compassionate and attentive to others’ needs. Example: [Molly Weasley](https://en.wikipedia.org/wiki/Molly_Weasley)",
-            "ESTJ (The Executive) - Organized, practical, and demonstrates clear execution skills. Example: [Monica Geller](https://en.wikipedia.org/wiki/Monica_Geller)",
-            "ESFJ (The Consul) - Outgoing, cooperative, and an effective communicator. Example: [Rachel Green](https://en.wikipedia.org/wiki/Rachel_Green)",
-            "ISTP (The Virtuoso) - Analytical and resourceful, solving problems with quick thinking. Example: [Black Widow (Natasha Romanoff)](https://en.wikipedia.org/wiki/Black_Widow_(Marvel_Comics))",
-            "ISFP (The Adventurer) - Creative, sensitive, and appreciates artistic expression. Example: [Arwen](https://en.wikipedia.org/wiki/Arwen)",
-            "ESTP (The Entrepreneur) - Bold and action-oriented, thriving on challenges. Example: [Lara Croft](https://en.wikipedia.org/wiki/Lara_Croft)",
-            "ESFP (The Entertainer) - Energetic, spontaneous, and radiates positive energy. Example: [Phoebe Buffay](https://en.wikipedia.org/wiki/Phoebe_Buffay)"
-        ]
-        mbti_dropdown = gr.Dropdown(
-            label="AI Persona MBTI (default: INTP)",
-            choices=mbti_choices,
-            value="INTP (The Thinker) - Excels at theoretical analysis and creative problem solving. Example: [Velma Dinkley](https://en.wikipedia.org/wiki/Velma_Dinkley)",
-            interactive=True
-        )
+        # Removed the MBTI dropdown entirely. The fixed MBTI from mbti.json is used instead.
         sexual_openness_slider = gr.Slider(
             minimum=1, maximum=5, step=1, value=2,
             label="Sexual Openness (1-5, default: 2)",
@@ -855,9 +825,9 @@ with gr.Blocks(css=css, title="HeartSync - World") as demo:
         visible=False
     )
 
-    # Chat interface creation
+    # Chat interface creation using the modified_run function.
     chat = gr.ChatInterface(
-        fn=modified_run, # Using the modified function
+        fn=modified_run, # Using the modified function with fixed MBTI.
         type="messages",
         chatbot=gr.Chatbot(type="messages", scale=1, allow_tags=["image"]),
         textbox=gr.MultimodalTextbox(
@@ -872,15 +842,13 @@ with gr.Blocks(css=css, title="HeartSync - World") as demo:
             web_search_checkbox,
             web_search_text,
             age_group_dropdown,
-            mbti_dropdown,
             sexual_openness_slider,
             image_gen_checkbox,
         ],
         additional_outputs=[
-            generated_images, #
+            generated_images, # Gallery component
         ],
         stop_btn=False,
-        # title='<a href="https://discord.gg/openfreeai" target="_blank">https://discord.gg/openfreeai</a>',
         examples=examples,
         run_examples_on_click=False,
         cache_examples=False,
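For context, the gallery works through the pairing of additional_inputs with additional_outputs on gr.ChatInterface: modified_run yields (text, gallery_update) tuples, and the second value is routed to the generated_images gallery declared earlier in the Blocks. A stripped-down sketch of the same pattern, with illustrative component names:

import gradio as gr

def fn(message, history, image_gen):
    # First yielded value goes to the chatbot, the second to the extra output component.
    yield "thinking...", gr.Gallery(visible=False, value=[])
    yield "done", gr.Gallery(visible=image_gen, value=[])

with gr.Blocks() as demo:
    gallery = gr.Gallery(label="Generated Images")
    image_gen = gr.Checkbox(label="Image Gen")
    gr.ChatInterface(
        fn=fn,
        type="messages",
        additional_inputs=[image_gen],
        additional_outputs=[gallery],
    )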
@@ -888,7 +856,6 @@ with gr.Blocks(css=css, title="HeartSync - World") as demo:
         delete_cache=(1800, 1800),
     )
 
-
     with gr.Row(elem_id="examples_row"):
         with gr.Column(scale=12, elem_id="examples_container"):
             gr.Markdown("### @Community https://discord.gg/openfreeai ")