Chandima Prabhath
committed
Commit 9e02558 · 1 Parent(s): 34f6836
Refactor LLM functions by removing unused summarize, translate, and meme functions; update generate_llm to increase max_tokens limit for improved response handling.
app.py
CHANGED
@@ -53,7 +53,7 @@ def get_thread_context():
 
 # --- Conversation History -------------------------------------------------
 
-history = defaultdict(lambda: deque(maxlen=
+history = defaultdict(lambda: deque(maxlen=10))
 
 def record_user_message(chat_id, sender, message):
     history[(chat_id, sender)].append(f"User: {message}")
@@ -185,14 +185,6 @@ def _fn_send_accept(mid, cid, message):
     if chat_id and sender:
         record_bot_message(chat_id, sender, message)
 
-def _fn_summarize(mid, cid, text):
-    summary = generate_llm(f"Summarize:\n\n{text}")
-    _fn_send_text(mid, cid, summary)
-
-def _fn_translate(mid, cid, lang, text):
-    resp = generate_llm(f"Translate to {lang}:\n\n{text}")
-    _fn_send_text(mid, cid, resp)
-
 def _fn_joke(mid, cid):
     try:
         j = requests.get(
@@ -213,15 +205,6 @@ def _fn_inspire(mid, cid):
     quote = generate_llm("Give me a unique, random short inspirational quote.")
     _fn_send_text(mid, cid, f"✨ {quote}")
 
-def _fn_meme(mid, cid, txt):
-    _fn_send_accept(mid, cid, "🎨 Generating meme…")
-    task_queue.put({
-        "type": "image",
-        "message_id": mid,
-        "chat_id": cid,
-        "prompt": f"meme: {txt}"
-    })
-
 def _fn_generate_images(
     message_id: str,
     chat_id: str,
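For reference, a minimal sketch of how the bounded conversation history in app.py behaves after this commit, assuming record_bot_message mirrors the record_user_message shown in the diff; get_context_text is a hypothetical helper added here only for illustration:

from collections import defaultdict, deque

# Each (chat_id, sender) pair gets its own rolling buffer; with maxlen=10,
# appending an 11th entry silently evicts the oldest one.
history = defaultdict(lambda: deque(maxlen=10))

def record_user_message(chat_id, sender, message):
    history[(chat_id, sender)].append(f"User: {message}")

def record_bot_message(chat_id, sender, message):
    # Assumed to mirror record_user_message, as the diff context suggests.
    history[(chat_id, sender)].append(f"Bot: {message}")

def get_context_text(chat_id, sender):
    # Hypothetical helper (not part of the diff): join the buffered turns
    # into a prompt-ready block.
    return "\n".join(history[(chat_id, sender)])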
polLLM.py
CHANGED
@@ -63,6 +63,7 @@ def generate_llm(
         model=model,
         messages=messages,
         seed=seed,
+        max_tokens=4000,
     )
     text = resp.choices[0].message.content.strip()
     logger.debug("LLM response received")