from transformers import pipeline
import gradio as gr

# Load your custom-trained model
model_name = "lachie0234/jammy-finetuned"  # Replace with your model's name on Hugging Face
qa_pipeline = pipeline("question-answering", model=model_name)

# Define the function to handle user input
def answer_question(question):
    context = "Legendary answer man."
    result = qa_pipeline(question=question, context=context)
    return result['answer']

# Set up the Gradio interface
interface = gr.Interface(fn=answer_question, inputs="text", outputs="text")
interface.launch()