import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

model_id = "ajibawa-2023/Young-Children-Storyteller-Mistral-7B"

# Load model and tokenizer (NOTE: this requires a GPU for best results)
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", torch_dtype="auto")
generator = pipeline("text-generation", model=model, tokenizer=tokenizer)


def generate_story(character_1, character_2, theme, setting, scenario):
    # Build a structured prompt from the user's inputs.
    prompt = (
        f"Characters: {character_1}, {character_2}\n"
        f"Theme: {theme}\n"
        f"Setting: {setting}\n"
        f"Scenario: {scenario}\n"
        f"Write a story suitable for children aged 6 to 12:"
    )
    # Sample up to 300 new tokens; do_sample=True is needed for temperature to take effect.
    output = generator(
        prompt, max_new_tokens=300, do_sample=True, temperature=0.8
    )[0]["generated_text"]
    # The pipeline returns the prompt plus the completion, so strip the prompt off.
    return output[len(prompt):].strip()


demo = gr.Interface(
    fn=generate_story,
    inputs=[
        gr.Textbox(label="Character 1 (e.g. Girl)"),
        gr.Textbox(label="Character 2 (e.g. Wolf)"),
        gr.Textbox(label="Theme (e.g. Bravery)"),
        gr.Textbox(label="Setting (e.g. Rainy Forest)"),
        gr.Textbox(label="Scenario (e.g. A girl saw a wolf coming toward her)"),
    ],
    outputs=gr.Textbox(label="📖 Generated Story"),
    title="🧚 Genieverse Story Generator",
    description="Enter your characters, theme, and scenario to generate a magical story for kids!",
)

demo.launch()