Copuchat / app.py
ouhenio's picture
Update app.py
45a9874 verified
raw
history blame
10.2 kB
import gradio as gr
import os
import sys
import json
import random
import hashlib
import requests
from datetime import datetime
from openai import OpenAI
# OpenAI model used for every chat completion in this app.
MODEL = "gpt-4.1-mini"
def get_env_bool(key, default="False"):
    """Read environment variable *key* and coerce it to a boolean.

    The strings 'true', '1', 'yes', 'on' (any case) are truthy; everything
    else is False. If *default* is already a bool it is returned unchanged
    when the variable is unset.
    """
    raw = os.getenv(key, default)
    if isinstance(raw, bool):
        # A bool default passes straight through untouched.
        return raw
    truthy_tokens = ('true', '1', 'yes', 'on')
    return str(raw).lower() in truthy_tokens
def get_env_list(key, default=""):
    """Read environment variable *key* as a list of stripped strings.

    Accepts either a JSON array (e.g. '["a", "b"]') or a comma-separated
    string ('a, b'). Empty items are dropped. An unset or empty variable
    yields []. Malformed pseudo-JSON falls back to comma-splitting.
    """
    value = os.getenv(key, default)
    if not value:  # covers unset/empty; the old `or value == ""` was redundant
        return []
    text = str(value)
    if text.startswith('[') and text.endswith(']'):
        try:
            # json is imported at module level; the original re-imported it
            # locally on every call for no reason.
            parsed = json.loads(text)
            if isinstance(parsed, list):
                return [str(item).strip() for item in parsed if str(item).strip()]
        except json.JSONDecodeError:
            pass  # fall through to comma-splitting below
    return [item.strip() for item in text.split(',') if item.strip()]
# Kill switch: when True the UI shows a "usage limit reached" banner and the
# send button is hidden.
DISABLED = get_env_bool("DISABLED", "False")
# Pool of OpenAI API keys; `predict` picks one at random per request.
OPENAI_API_KEYS = get_env_list("OPENAI_API_KEYS", "")
# NOTE(review): NUM_THREADS is read here but never used in this file — confirm
# whether it was meant for `demo.launch()`/queue configuration.
NUM_THREADS = int(os.getenv("NUM_THREADS", "4"))
# Salt mixed into the client-IP hash so raw IPs are pseudonymized in logs.
IP_SALT = os.getenv("IP_SALT", "latamgpt-default-salt-2025")
def exception_handler(exception_type, exception, traceback):
    """Print uncaught exceptions as a single `Type: message` line (no traceback)."""
    print(f"{exception_type.__name__}: {exception}")
# Install the compact handler and suppress traceback printing globally so the
# hosted Space's logs stay terse. This hides stack traces from all errors.
sys.excepthook = exception_handler
sys.tracebacklimit = 0
def get_user_fingerprint(request):
    """Derive a salted, truncated SHA-256 fingerprint from the client IP.

    Args:
        request: Gradio/Starlette request object with `.headers` and `.client`.

    Returns:
        tuple: (real_ip, fingerprint) where fingerprint is the first 16 hex
        chars of sha256(f"{real_ip}:{IP_SALT}").
    """
    # Prefer proxy-forwarded headers (first hop of x-forwarded-for), then the
    # single-hop x-real-ip header.
    real_ip = (
        request.headers.get('x-forwarded-for', '').split(',')[0].strip() or
        request.headers.get('x-real-ip', '')
    )
    if not real_ip:
        # Fall back to the direct peer address. Starlette/Gradio expose
        # `request.client` as an object with a `.host` attribute, not a dict;
        # the original `getattr(request, 'client', {}).get('host', ...)` would
        # raise AttributeError in that case. Handle both shapes.
        client = getattr(request, 'client', None)
        if isinstance(client, dict):
            real_ip = client.get('host', 'unknown')
        else:
            real_ip = getattr(client, 'host', None) or 'unknown'
    fingerprint_data = f"{real_ip}:{IP_SALT}"
    user_fingerprint = hashlib.sha256(fingerprint_data.encode()).hexdigest()[:16]
    return real_ip, user_fingerprint
def get_country_from_ip(ip):
    """Best-effort geolocation of *ip* via the free ip-api.com service.

    Returns:
        dict: keys 'country', 'country_code', 'region'; placeholder values
        ('Unknown'/'UN') on any lookup or parse failure.
    """
    try:
        # Short timeout so a slow geo lookup cannot stall the chat request.
        response = requests.get(f"http://ip-api.com/json/{ip}", timeout=2)
        if response.status_code == 200:
            data = response.json()
            return {
                "country": data.get('country', 'Unknown'),
                "country_code": data.get('countryCode', 'UN'),
                "region": data.get('regionName', 'Unknown')
            }
    except (requests.RequestException, ValueError):
        # Narrowed from a bare `except:` (which also swallowed SystemExit and
        # KeyboardInterrupt). RequestException covers network/timeout errors;
        # ValueError covers a non-JSON response body. Lookup is best-effort.
        pass
    return {"country": "Unknown", "country_code": "UN", "region": "Unknown"}
def predict(inputs, top_p, temperature, chat_counter, chatbot, history, request: gr.Request):
    """Stream a chat completion from OpenAI and log the exchange as JSON.

    Generator used as a Gradio event handler. Each yield is a 6-tuple:
    (chatbot (user, assistant) pairs, flat history list, chat_counter,
    status string, textbox update, button update), so the UI refreshes as
    tokens stream in.

    Args:
        inputs: the latest user message from the textbox.
        top_p: nucleus-sampling parameter from the UI slider.
        temperature: sampling temperature from the UI slider.
        chat_counter: number of turns completed so far (0 on first message).
        chatbot: current Chatbot component value (received but not read;
            the flat `history` state drives the rendered pairs).
        history: flat list alternating [user, assistant, user, assistant, ...].
        request: Gradio request, used for session/IP fingerprinting and logging.
    """
    # No keys configured: report the problem and re-enable the inputs.
    if not OPENAI_API_KEYS or not OPENAI_API_KEYS[0]:
        yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "No API keys configured", gr.update(interactive=True), gr.update(interactive=True)
        return
    # Pick one key at random per request to spread quota across the pool.
    api_key = random.choice(OPENAI_API_KEYS)
    client = OpenAI(api_key=api_key)
    session_id = getattr(request, 'session_hash', 'unknown')
    real_ip, user_fingerprint = get_user_fingerprint(request)
    geo_info = get_country_from_ip(real_ip)
    # Raw headers are (bytes, bytes) pairs; decode them for JSON logging.
    headers_dict = {key.decode('utf-8'): value.decode('utf-8') for key, value in request.headers.raw}
    # Rebuild the OpenAI message list from the flat history: even indices are
    # user turns, odd indices assistant turns.
    messages = []
    if chat_counter != 0:
        for i, data in enumerate(history):
            role = 'user' if i % 2 == 0 else 'assistant'
            messages.append({"role": role, "content": data})
    messages.append({"role": "user", "content": inputs})
    chat_counter += 1
    history.append(inputs)
    token_counter = 0
    partial_words = ""
    try:
        stream = client.chat.completions.create(
            model=MODEL,
            messages=messages,
            temperature=temperature,
            top_p=top_p,
            stream=True,
            presence_penalty=0,
            frequency_penalty=0,
            max_tokens=2048
        )
        for chunk in stream:
            if chunk.choices[0].delta.content is not None:
                partial_words += chunk.choices[0].delta.content
                if token_counter == 0:
                    # First chunk opens a new assistant slot in the history.
                    # NOTE(review): the leading " " is overwritten by the
                    # history[-1] assignment on the very next chunk — looks
                    # like an inherited quirk; confirm before removing.
                    history.append(" " + partial_words)
                else:
                    history[-1] = partial_words
                token_counter += 1
                # Keep textbox and button disabled while tokens stream.
                yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "200", gr.update(interactive=False), gr.update(interactive=False)
        # Re-enable inputs after streaming completes
        yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "200", gr.update(interactive=True), gr.update(interactive=True)
    except Exception as e:
        # Surface the API error in the status box and unlock the inputs.
        print(f'OpenAI API error: {e}')
        yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, str(e), gr.update(interactive=True), gr.update(interactive=True)
    # Research log: one JSON object per request printed to stdout (collected
    # by the hosting platform). Contains the pseudonymized fingerprint, geo
    # info, full message list, response text, and request headers.
    log_data = {
        "session_id": session_id,
        "user_fingerprint": user_fingerprint,
        "conversation_id": f"{session_id}_{datetime.now().strftime('%Y%m%d_%H')}",
        "country": geo_info["country"],
        "country_code": geo_info["country_code"],
        "region": geo_info["region"],
        "chat_counter": chat_counter,
        "model": MODEL,
        "messages": messages,
        "response": partial_words,
        "headers": headers_dict,
        "temperature": temperature,
        "top_p": top_p,
        "token_counter": token_counter,
        "timestamp": datetime.now().isoformat()
    }
    print(json.dumps(log_data))
def reset_textbox():
    """Clear the textbox and disable both inputs while a request is in flight."""
    cleared_textbox = gr.update(value='', interactive=False)
    disabled_button = gr.update(interactive=False)
    return cleared_textbox, disabled_button
# Page header; replaced by a red "usage limit" banner when DISABLED is set.
title = """<h1 align="center">LatamGPT Data Collection: Research Preview</h1>"""
if DISABLED:
    title = """<h1 align="center" style="color:red">This app has reached usage limit. Please check back tomorrow.</h1>"""
# Intro markdown. NOTE(review): `description` is defined but never passed to
# any component below — confirm whether it should be rendered.
description = """Language models can be conditioned to act like dialogue agents through a conversational prompt that typically takes the form:
```
User: <utterance>
Assistant: <utterance>
User: <utterance>
Assistant: <utterance>
...
```
In this app, you can explore the outputs of GPT-4.1 mini while contributing to LatamGPT research.
"""
# UI layout: a hidden main chat column plus a consent column shown first.
# Accepting the consent dialog flips their visibility (see enable_inputs).
with gr.Blocks(css="""#col_container { margin-left: auto; margin-right: auto;}
#chatbot {height: 520px; overflow: auto;}""") as demo:
    gr.HTML(title)
    # Main chat UI — hidden until the user accepts the consent terms.
    with gr.Column(elem_id="col_container", visible=False) as main_block:
        chatbot = gr.Chatbot(elem_id='chatbot')
        inputs = gr.Textbox(placeholder="隆Hola! 驴En qu茅 puedo ayudarte?", label="Escribe tu mensaje y presiona Enter")
        # Flat conversation state: [user, assistant, user, assistant, ...].
        state = gr.State([])
        with gr.Row():
            with gr.Column(scale=7):
                # Send button; hidden entirely when the app is disabled.
                b1 = gr.Button(visible=not DISABLED)
            with gr.Column(scale=3):
                server_status_code = gr.Textbox(label="Status code from server")
        with gr.Accordion("Parameters", open=False):
            top_p = gr.Slider(minimum=0, maximum=1.0, value=1.0, step=0.05, interactive=True, label="Top-p (nucleus sampling)")
            temperature = gr.Slider(minimum=0, maximum=2.0, value=0.7, step=0.1, interactive=True, label="Temperature")
            # Hidden turn counter threaded through predict() as state.
            chat_counter = gr.Number(value=0, visible=False, precision=0)
    # Consent UI — shown first; a JS confirm() dialog sets the hidden checkbox.
    with gr.Column(elem_id="user_consent_container") as user_consent_block:
        accept_checkbox = gr.Checkbox(visible=False)
        js = "(x) => confirm('Al hacer clic en \"Acepto\", acepto que mis datos pueden ser publicados o compartidos para investigaci贸n.')"
        with gr.Accordion("Consentimiento de Usuario para Recolecci贸n, Uso y Compartici贸n de Datos", open=True):
            gr.HTML("""
<div>
<p>Al usar nuestra aplicaci贸n, que funciona con la API de OpenAI, reconoces y aceptas los siguientes t茅rminos sobre los datos que proporcionas:</p>
<ol>
<li><strong>Recolecci贸n:</strong> Podemos recopilar informaci贸n, incluyendo las entradas que escribes en nuestra aplicaci贸n, las salidas generadas por la API de OpenAI, y ciertos detalles t茅cnicos sobre tu dispositivo y conexi贸n (como tipo de navegador, sistema operativo e direcci贸n IP) proporcionados por los headers de solicitud de tu dispositivo.</li>
<li><strong>Uso:</strong> Podemos usar los datos recopilados para prop贸sitos de investigaci贸n, para mejorar nuestros servicios, y para desarrollar nuevos productos o servicios, incluyendo aplicaciones comerciales, y para prop贸sitos de seguridad, como proteger contra acceso no autorizado y ataques.</li>
<li><strong>Compartici贸n y Publicaci贸n:</strong> Tus datos, incluyendo los detalles t茅cnicos recopilados de los headers de solicitud de tu dispositivo, pueden ser publicados, compartidos con terceros, o usados para an谩lisis y prop贸sitos de reportes.</li>
<li><strong>Retenci贸n de Datos:</strong> Podemos retener tus datos, incluyendo los detalles t茅cnicos recopilados de los headers de solicitud de tu dispositivo, por el tiempo que sea necesario.</li>
</ol>
<p>Al continuar usando nuestra aplicaci贸n, proporcionas tu consentimiento expl铆cito para la recolecci贸n, uso y potencial compartici贸n de tus datos como se describe arriba. Si no est谩s de acuerdo con nuestras pr谩cticas de recolecci贸n, uso y compartici贸n de datos, por favor no uses nuestra aplicaci贸n.</p>
<p><strong>Este proyecto contribuye al desarrollo de LatamGPT, un modelo de lenguaje para Am茅rica Latina.</strong></p>
</div>
""")
            accept_button = gr.Button("Acepto / I Agree")
    def enable_inputs():
        """Hide the consent column and reveal the main chat column."""
        return gr.update(visible=False), gr.update(visible=True)
    # Button click runs the JS confirm(); its boolean result lands in the
    # hidden checkbox, whose change event then swaps the column visibility.
    accept_button.click(None, None, accept_checkbox, js=js, queue=False)
    accept_checkbox.change(fn=enable_inputs, inputs=[], outputs=[user_consent_block, main_block], queue=False)
    # Submitting via Enter or the button first clears/disables the inputs
    # (unqueued), then streams the prediction which re-enables them when done.
    inputs.submit(reset_textbox, [], [inputs, b1], queue=False)
    inputs.submit(predict, [inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter, server_status_code, inputs, b1])
    b1.click(reset_textbox, [], [inputs, b1], queue=False)
    b1.click(predict, [inputs, top_p, temperature, chat_counter, chatbot, state], [chatbot, state, chat_counter, server_status_code, inputs, b1])
# Script entry point: launch the Gradio app.
if __name__ == "__main__":
    demo.launch()