# app.py
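# Minimal Gradio chat UI backed by the OpenAI Chat Completions API.
# The API key, base URL, model, system prompt, and sampling settings are
# supplied through the page controls at runtime; nothing is read from env vars.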
import gradio as gr
from openai import OpenAI
DEFAULT_SYSTEM_PROMPT = "You are a helpful, concise assistant."
DEFAULT_MODEL = "gpt-5-chat-latest"

def chat_fn(message, history, system_prompt, temperature, max_tokens, api_key, base_url, model):
    if not api_key:
        return "⚠️ Please provide an OpenAI API key (top of the page)."
    client = OpenAI(api_key=api_key, base_url=(base_url or "https://api.openai.com/v1").strip())
    # Rebuild the full conversation: system prompt first, then the
    # (user_message, assistant_message) pairs Gradio keeps in `history`.
    messages = [{"role": "system", "content": (system_prompt or DEFAULT_SYSTEM_PROMPT).strip()}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})
    try:
        resp = client.chat.completions.create(
            model=(model or DEFAULT_MODEL).strip(),
            messages=messages,
            temperature=float(temperature) if temperature is not None else 0.7,
            max_tokens=int(max_tokens) if max_tokens else None,
        )
        return resp.choices[0].message.content
    except Exception as e:
        return f"❌ Error: {e}"

with gr.Blocks(title="OpenAI Chat (Gradio)") as demo:
    gr.Markdown("## 🤖 OpenAI Chatbot\nPaste your API key, pick a model, and chat.")
    with gr.Row():
        api_key = gr.Textbox(label="OpenAI API Key", type="password", placeholder="sk-...", lines=1)
        base_url = gr.Textbox(label="Base URL", value="https://api.openai.com/v1", lines=1)
    with gr.Row():
        model = gr.Textbox(label="Model", value=DEFAULT_MODEL, lines=1)
        system_prompt = gr.Textbox(label="System prompt", value=DEFAULT_SYSTEM_PROMPT, lines=2)
    with gr.Row():
        temperature = gr.Slider(minimum=0.0, maximum=2.0, value=0.7, step=0.1, label="Temperature")
        max_tokens = gr.Slider(minimum=64, maximum=4096, value=512, step=32, label="Max tokens (response)")
    gr.Markdown("---")
    # The extra controls are passed to chat_fn after (message, history), in this order.
    gr.ChatInterface(
        fn=chat_fn,
        title="OpenAI Chatbot",
        additional_inputs=[system_prompt, temperature, max_tokens, api_key, base_url, model],
    )

if __name__ == "__main__":
    demo.launch()