import ollama
import gradio as gr


def chat(prompt):
    # Send the prompt to the local Ollama server and return the model's reply
    response = ollama.chat(
        model="mistral",
        messages=[{"role": "user", "content": prompt}],
    )
    return response["message"]["content"]


# Minimal Gradio UI: a text box for the prompt in, the model's answer out
iface = gr.Interface(fn=chat, inputs="text", outputs="text")
iface.launch()
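
# Note: this assumes the Ollama server is running locally and the "mistral"
# model has already been pulled (e.g. `ollama pull mistral`); launch() then
# prints the local URL where the Gradio app is being served.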