from transformers import pipeline
import gradio as gr

# Text-generation pipeline. EleutherAI/gpt-neo-1.3B is used here as a lightweight
# example checkpoint; swap in mosaicml/mpt-7b-instruct for better results
# (see the note after the script).
pipe = pipeline(
    "text-generation",
    model="EleutherAI/gpt-neo-1.3B"
)

def chat(user_input):
    # Sample a completion; max_length counts the prompt tokens, and the
    # returned generated_text includes the prompt itself.
    output = pipe(
        user_input,
        max_length=100,
        do_sample=True,
        top_k=50,
        top_p=0.9,
        temperature=0.6
    )[0]['generated_text']
    return output

# Minimal Gradio UI: one text box in, generated text out.
demo = gr.Interface(fn=chat, inputs="text", outputs="text")
demo.launch()
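
If you want the stronger MPT-7B-Instruct checkpoint mentioned in the comment above, note that the mosaicml/mpt-7b-instruct repository ships custom modeling code and the 7B weights need a GPU with roughly 16 GB of memory. A minimal sketch of the swap, assuming torch and accelerate are installed:

import torch
from transformers import pipeline

# Larger instruction-tuned model: better answers, heavier hardware requirements.
pipe = pipeline(
    "text-generation",
    model="mosaicml/mpt-7b-instruct",
    torch_dtype=torch.bfloat16,   # half-precision weights to cut memory use
    trust_remote_code=True,       # the MPT repo defines its own model class
    device_map="auto",            # let accelerate place the weights on the GPU
)

The rest of the script (the chat function and the Gradio interface) stays the same.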