ouhenio committed on
Commit
45a9874
verified
1 Parent(s): 8324cfa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -1
app.py CHANGED
@@ -3,6 +3,9 @@ import os
3
  import sys
4
  import json
5
  import random
 
 
 
6
  from openai import OpenAI
7
 
8
  MODEL = "gpt-4.1-mini"
@@ -32,6 +35,7 @@ def get_env_list(key, default=""):
32
  DISABLED = get_env_bool("DISABLED", "False")
33
  OPENAI_API_KEYS = get_env_list("OPENAI_API_KEYS", "")
34
  NUM_THREADS = int(os.getenv("NUM_THREADS", "4"))
 
35
 
36
  def exception_handler(exception_type, exception, traceback):
37
  print(f"{exception_type.__name__}: {exception}")
@@ -39,6 +43,30 @@ def exception_handler(exception_type, exception, traceback):
39
  sys.excepthook = exception_handler
40
  sys.tracebacklimit = 0
41
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  def predict(inputs, top_p, temperature, chat_counter, chatbot, history, request: gr.Request):
43
  if not OPENAI_API_KEYS or not OPENAI_API_KEYS[0]:
44
  yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "No API keys configured", gr.update(interactive=True), gr.update(interactive=True)
@@ -47,6 +75,9 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot, history, request:
47
  api_key = random.choice(OPENAI_API_KEYS)
48
  client = OpenAI(api_key=api_key)
49
 
 
 
 
50
  headers_dict = {key.decode('utf-8'): value.decode('utf-8') for key, value in request.headers.raw}
51
 
52
  messages = []
@@ -84,6 +115,7 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot, history, request:
84
  token_counter += 1
85
  yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "200", gr.update(interactive=False), gr.update(interactive=False)
86
 
 
87
  yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "200", gr.update(interactive=True), gr.update(interactive=True)
88
 
89
  except Exception as e:
@@ -91,6 +123,12 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot, history, request:
91
  yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, str(e), gr.update(interactive=True), gr.update(interactive=True)
92
 
93
  log_data = {
 
 
 
 
 
 
94
  "chat_counter": chat_counter,
95
  "model": MODEL,
96
  "messages": messages,
@@ -98,7 +136,8 @@ def predict(inputs, top_p, temperature, chat_counter, chatbot, history, request:
98
  "headers": headers_dict,
99
  "temperature": temperature,
100
  "top_p": top_p,
101
- "token_counter": token_counter
 
102
  }
103
  print(json.dumps(log_data))
104
 
 
3
  import sys
4
  import json
5
  import random
6
+ import hashlib
7
+ import requests
8
+ from datetime import datetime
9
  from openai import OpenAI
10
 
11
  MODEL = "gpt-4.1-mini"
 
35
  DISABLED = get_env_bool("DISABLED", "False")
36
  OPENAI_API_KEYS = get_env_list("OPENAI_API_KEYS", "")
37
  NUM_THREADS = int(os.getenv("NUM_THREADS", "4"))
38
+ IP_SALT = os.getenv("IP_SALT", "latamgpt-default-salt-2025")
39
 
40
  def exception_handler(exception_type, exception, traceback):
41
  print(f"{exception_type.__name__}: {exception}")
 
43
  sys.excepthook = exception_handler
44
  sys.tracebacklimit = 0
45
 
46
def get_user_fingerprint(request):
    """Derive the caller's IP and a salted, truncated fingerprint for it.

    Prefers proxy headers (first entry of ``x-forwarded-for``, then
    ``x-real-ip``), falling back to the transport-level client address.

    Parameters
    ----------
    request : gr.Request
        Incoming Gradio request carrying HTTP headers and client info.

    Returns
    -------
    tuple[str, str]
        ``(real_ip, user_fingerprint)`` where the fingerprint is the first
        16 hex chars of ``sha256(f"{real_ip}:{IP_SALT}")``.
    """
    real_ip = (
        request.headers.get('x-forwarded-for', '').split(',')[0].strip() or
        request.headers.get('x-real-ip', '')
    )
    if not real_ip:
        # Fallback to the socket-level peer. On gradio/starlette requests
        # ``request.client`` is an object exposing ``.host`` — not a dict —
        # so the previous ``.get('host', ...)`` call raised AttributeError
        # whenever both proxy headers were empty. Handle both shapes.
        client = getattr(request, 'client', None)
        if isinstance(client, dict):
            real_ip = client.get('host', 'unknown')
        else:
            real_ip = getattr(client, 'host', None) or 'unknown'
    # Salted hash so logs carry a stable per-user token without the raw IP.
    fingerprint_data = f"{real_ip}:{IP_SALT}"
    user_fingerprint = hashlib.sha256(fingerprint_data.encode()).hexdigest()[:16]
    return real_ip, user_fingerprint
55
+
56
def get_country_from_ip(ip):
    """Best-effort geolocation of *ip* via the ip-api.com service.

    Parameters
    ----------
    ip : str
        IP address string as extracted from the request headers.

    Returns
    -------
    dict
        Keys ``country``, ``country_code`` and ``region``; placeholder
        values ("Unknown"/"UN") when the lookup fails or times out.
    """
    try:
        # NOTE: ip-api.com's free tier is HTTP-only; the 2s timeout keeps a
        # slow or unreachable geo service from stalling the chat request.
        response = requests.get(f"http://ip-api.com/json/{ip}", timeout=2)
        if response.status_code == 200:
            data = response.json()
            return {
                "country": data.get('country', 'Unknown'),
                "country_code": data.get('countryCode', 'UN'),
                "region": data.get('regionName', 'Unknown')
            }
    except (requests.RequestException, ValueError):
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt are
        # no longer swallowed; ValueError covers a malformed JSON payload.
        # Lookup remains best-effort — fall through to the placeholder.
        pass
    return {"country": "Unknown", "country_code": "UN", "region": "Unknown"}
69
+
70
  def predict(inputs, top_p, temperature, chat_counter, chatbot, history, request: gr.Request):
71
  if not OPENAI_API_KEYS or not OPENAI_API_KEYS[0]:
72
  yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "No API keys configured", gr.update(interactive=True), gr.update(interactive=True)
 
75
  api_key = random.choice(OPENAI_API_KEYS)
76
  client = OpenAI(api_key=api_key)
77
 
78
+ session_id = getattr(request, 'session_hash', 'unknown')
79
+ real_ip, user_fingerprint = get_user_fingerprint(request)
80
+ geo_info = get_country_from_ip(real_ip)
81
  headers_dict = {key.decode('utf-8'): value.decode('utf-8') for key, value in request.headers.raw}
82
 
83
  messages = []
 
115
  token_counter += 1
116
  yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "200", gr.update(interactive=False), gr.update(interactive=False)
117
 
118
+ # Re-enable inputs after streaming completes
119
  yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, "200", gr.update(interactive=True), gr.update(interactive=True)
120
 
121
  except Exception as e:
 
123
  yield [(history[i], history[i + 1]) for i in range(0, len(history) - 1, 2)], history, chat_counter, str(e), gr.update(interactive=True), gr.update(interactive=True)
124
 
125
  log_data = {
126
+ "session_id": session_id,
127
+ "user_fingerprint": user_fingerprint,
128
+ "conversation_id": f"{session_id}_{datetime.now().strftime('%Y%m%d_%H')}",
129
+ "country": geo_info["country"],
130
+ "country_code": geo_info["country_code"],
131
+ "region": geo_info["region"],
132
  "chat_counter": chat_counter,
133
  "model": MODEL,
134
  "messages": messages,
 
136
  "headers": headers_dict,
137
  "temperature": temperature,
138
  "top_p": top_p,
139
+ "token_counter": token_counter,
140
+ "timestamp": datetime.now().isoformat()
141
  }
142
  print(json.dumps(log_data))
143