tasal9 committed on
Commit
b7d1634
·
1 Parent(s): cd06a48

Enhance ECHO_MODE functionality and logging for improved testing and environment configuration

Browse files
Files changed (2) hide show
  1. __pycache__/app.cpython-313.pyc +0 -0
  2. app.py +38 -11
__pycache__/app.cpython-313.pyc CHANGED
Binary files a/__pycache__/app.cpython-313.pyc and b/__pycache__/app.cpython-313.pyc differ
 
app.py CHANGED
@@ -20,7 +20,23 @@ HEALTH_PORT = int(os.getenv("HEALTH_PORT", "8080"))
20
  GRADIO_HOST = os.getenv("GRADIO_HOST", "0.0.0.0")
21
  GRADIO_PORT = int(os.getenv("GRADIO_PORT", "7860"))
22
  DEFAULT_MAX_NEW_TOKENS = int(os.getenv("DEFAULT_MAX_NEW_TOKENS", "128"))
23
- ECHO_MODE = os.getenv("ECHO_MODE", "off").lower() # 'off'|'echo'|'useless'
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
24
 
25
 
26
  # ---------------- Logging ----------------
@@ -127,21 +143,22 @@ def predict(instruction: str,
127
  do_sample: bool,
128
  temperature: float,
129
  top_p: float,
130
- num_return_sequences: int):
 
131
  """Generate text using the cached pipeline and return output or error message."""
132
  if not instruction or not instruction.strip():
133
  return "⚠️ مهرباني وکړئ یوه لارښوونه ولیکئ." # please provide an instruction
134
 
135
  # Fast path: echo/useless mode avoids loading large models during testing.
136
- if ECHO_MODE in ("echo", "useless"):
137
- # If echo mode, return exactly the combined prompt
138
  prompt = instruction.strip()
139
  if input_text and input_text.strip():
140
  prompt += "\n" + input_text.strip()
141
- if ECHO_MODE == "echo":
142
  return prompt
143
- # If useless mode, return a simple placeholder text
144
- return "This is a useless placeholder response."
145
 
146
  # Build a simple prompt: instruction (+ input if provided)
147
  prompt = instruction.strip()
@@ -189,11 +206,13 @@ def predict(instruction: str,
189
 
190
  def build_ui():
191
  with gr.Blocks() as demo:
 
192
  gr.Markdown(
193
- """
194
  # ZamAI mT5 Pashto Demo
195
  اپلیکیشن **ZamAI-mT5-Pashto** د پښتو لارښوونو لپاره.
196
- لاندې تنظیمات بدل کړئ او لارښوونه ولیکئ ترڅو ځواب ترلاسه کړئ.
 
197
  """
198
  )
199
 
@@ -213,6 +232,12 @@ def build_ui():
213
  input_text = gr.Textbox(lines=2, placeholder="اختیاري متن...", label="متن")
214
  output = gr.Textbox(label="ځواب", interactive=False, lines=8)
215
  generate_btn = gr.Button("جوړول", variant="primary")
 
 
 
 
 
 
216
 
217
  with gr.Column(scale=1):
218
  gr.Markdown("### د تولید تنظیمات")
@@ -227,7 +252,7 @@ def build_ui():
227
 
228
  generate_btn.click(
229
  fn=predict,
230
- inputs=[instruction_textbox, input_text, max_new_tokens, num_beams, do_sample, temperature, top_p, num_return_sequences],
231
  outputs=output,
232
  )
233
 
@@ -242,4 +267,6 @@ if __name__ == "__main__":
242
  logger.exception("Failed to start health server")
243
 
244
  demo = build_ui()
245
- demo.launch(server_name=GRADIO_HOST, server_port=GRADIO_PORT)
 
 
 
20
  GRADIO_HOST = os.getenv("GRADIO_HOST", "0.0.0.0")
21
  GRADIO_PORT = int(os.getenv("GRADIO_PORT", "7860"))
22
  DEFAULT_MAX_NEW_TOKENS = int(os.getenv("DEFAULT_MAX_NEW_TOKENS", "128"))
23
+ ECHO_MODE = os.getenv("ECHO_MODE", "off").lower() # default env; UI can override at runtime
24
+ OFFLINE_FLAG = os.getenv("OFFLINE", "0").lower() in {"1", "true", "yes"}
25
+ if OFFLINE_FLAG:
26
+ os.environ["HF_HUB_OFFLINE"] = "1"
27
+
28
+ def _log_cache_env():
29
+ try:
30
+ import huggingface_hub as _hub
31
+ hub_cache = getattr(_hub.constants, 'HF_HUB_CACHE', None)
32
+ except Exception:
33
+ hub_cache = None
34
+ logging.info(
35
+ "Cache config: HF_HOME=%s TRANSFORMERS_CACHE=%s HF_HUB_OFFLINE=%s hub_cache=%s",
36
+ os.getenv("HF_HOME"), os.getenv("TRANSFORMERS_CACHE"), os.getenv("HF_HUB_OFFLINE"), hub_cache
37
+ )
38
+
39
+ _log_cache_env()
40
 
41
 
42
  # ---------------- Logging ----------------
 
143
  do_sample: bool,
144
  temperature: float,
145
  top_p: float,
146
+ num_return_sequences: int,
147
+ mode: str):
148
  """Generate text using the cached pipeline and return output or error message."""
149
  if not instruction or not instruction.strip():
150
  return "⚠️ مهرباني وکړئ یوه لارښوونه ولیکئ." # please provide an instruction
151
 
152
  # Fast path: echo/useless mode avoids loading large models during testing.
153
+ active_mode = (mode or "").strip().lower() or ECHO_MODE
154
+ if active_mode in ("echo", "useless"):
155
  prompt = instruction.strip()
156
  if input_text and input_text.strip():
157
  prompt += "\n" + input_text.strip()
158
+ if active_mode == "echo":
159
  return prompt
160
+ else:
161
+ return "This is a useless placeholder response."
162
 
163
  # Build a simple prompt: instruction (+ input if provided)
164
  prompt = instruction.strip()
 
206
 
207
  def build_ui():
208
  with gr.Blocks() as demo:
209
+ device_label = "GPU" if _detect_device() != -1 else "CPU"
210
  gr.Markdown(
211
+ f"""
212
  # ZamAI mT5 Pashto Demo
213
  اپلیکیشن **ZamAI-mT5-Pashto** د پښتو لارښوونو لپاره.
214
+ **Device:** {device_label} | **Env Mode:** {ECHO_MODE} | **Offline:** {os.getenv('HF_HUB_OFFLINE','0')}
215
+ که د موډ بدلول غواړئ لاندې د Mode selector څخه استفاده وکړئ.
216
  """
217
  )
218
 
 
232
  input_text = gr.Textbox(lines=2, placeholder="اختیاري متن...", label="متن")
233
  output = gr.Textbox(label="ځواب", interactive=False, lines=8)
234
  generate_btn = gr.Button("جوړول", variant="primary")
235
+ mode_selector = gr.Dropdown(
236
+ choices=["off", "echo", "useless"],
237
+ value=ECHO_MODE,
238
+ label="Mode (off=real, echo=return prompt, useless=fixed)",
239
+ interactive=True,
240
+ )
241
 
242
  with gr.Column(scale=1):
243
  gr.Markdown("### د تولید تنظیمات")
 
252
 
253
  generate_btn.click(
254
  fn=predict,
255
+ inputs=[instruction_textbox, input_text, max_new_tokens, num_beams, do_sample, temperature, top_p, num_return_sequences, mode_selector],
256
  outputs=output,
257
  )
258
 
 
267
  logger.exception("Failed to start health server")
268
 
269
  demo = build_ui()
270
+ demo.launch(server_name=GRADIO_HOST, server_port=GRADIO_PORT)
271
+
272
+ logging.info("HF_HOME=%s TRANSFORMERS_CACHE=%s", os.getenv("HF_HOME"), os.getenv("TRANSFORMERS_CACHE"))