# app.py
import os

import gradio as gr

from hf_model_adapter import HFLocalModelAdapter

# You can change this default to any HF model you prefer that is public or
# available to your account.
DEFAULT_MODEL = os.environ.get("HF_MODEL", "stabilityai/stablelm-3b-4e1t")

# Initialize the adapter once at startup.
try:
    hf_adapter = HFLocalModelAdapter(model_name=DEFAULT_MODEL)
except Exception as e:
    # If the model failed to load, keep the app running and surface the error in the UI.
    hf_adapter = None
    model_load_error = str(e)
else:
    model_load_error = None
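
# `hf_model_adapter` is a separate module in this repo (not shown here). A minimal
# sketch of the interface this app assumes -- a class exposing
# generate(prompt, max_new_tokens) backed by a transformers text-generation
# pipeline -- might look roughly like this (names and defaults are assumptions):
#
#     from transformers import pipeline
#
#     class HFLocalModelAdapter:
#         def __init__(self, model_name):
#             # Load a local text-generation pipeline for the given model.
#             self._pipe = pipeline("text-generation", model=model_name)
#
#         def generate(self, prompt, max_new_tokens=256):
#             out = self._pipe(prompt, max_new_tokens=max_new_tokens,
#                              return_full_text=False)
#             return out[0]["generated_text"]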


def radio_agents_pipeline(user_message):
    """
    Simple Writer -> Editor -> QA flow using one HF model via the adapter.
    Returns the combined multi-stage output as a single markdown string.
    """
    if hf_adapter is None:
        return f"[Model not loaded] {model_load_error or 'unknown error'}"

    # Writer
    writer_prompt = (
        "You are a radio script writer. Draft a short radio segment script based on: "
        + user_message
    )
    writer_out = hf_adapter.generate(writer_prompt, max_new_tokens=400)

    # Editor
    editor_prompt = (
        "You are an editor. Improve clarity, shorten sentences, and make it radio-friendly.\n\n"
        + writer_out
    )
    edited_out = hf_adapter.generate(editor_prompt, max_new_tokens=300)

    # QA
    qa_prompt = (
        "You are a broadcast compliance QA. Check for profanity, disallowed statements, "
        "or anything requiring human review. Reply with 'OK' if fine, otherwise list issues.\n\n"
        + edited_out
    )
    qa_out = hf_adapter.generate(qa_prompt, max_new_tokens=150)

    final_script = (
        "📜 **Draft (Writer):**\n"
        + writer_out
        + "\n\n✂️ **Edited (Editor):**\n"
        + edited_out
        + "\n\n✅ **QA Result (QA):**\n"
        + qa_out
    )
    return final_script
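
# Example (hypothetical prompt): radio_agents_pipeline("30-second traffic update")
# returns one markdown string containing the Writer draft, the Editor pass, and the
# QA verdict, in that order.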

with gr.Blocks() as demo:
    gr.Markdown("# 🎙️ AutoGen-style Radio Content Creator (Gradio)")
    if model_load_error:
        gr.Markdown(f"**Model load error:** `{model_load_error}`\n\nSet `HF_MODEL` env var or check logs.")

    chatbot = gr.Chatbot(elem_id="chatbot", height=600)
    with gr.Row():
        txt = gr.Textbox(
            label="Enter your prompt (e.g., '2-min morning show script about local news')",
            lines=2,
        )
        btn = gr.Button("Send")

    def user_submit(prompt, chat_history):
        # Defensive: treat a missing history as an empty list.
        chat_history = chat_history or []
        if not prompt or prompt.strip() == "":
            return "", chat_history
        response = radio_agents_pipeline(prompt)
        chat_history.append(("You: " + prompt, "Assistant:\n" + response))
        return "", chat_history

    txt.submit(user_submit, [txt, chatbot], [txt, chatbot])
    btn.click(user_submit, [txt, chatbot], [txt, chatbot])

    def clear_chat():
        return []

    gr.Button("Clear Chat").click(clear_chat, None, chatbot)

if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)
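
# Run locally with `python app.py`; the UI is served on port 7860 and bound to
# 0.0.0.0 so it is reachable from outside a container (as on an HF Space).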