# Storytelling bot demo — presumably copied from a Hugging Face Space page
# (the "Spaces: Running" lines were web-page residue, not code).
import streamlit as st  # FIX: original said "streamlist" (ImportError). NOTE(review): `st` is unused below — verify whether a Streamlit UI was intended.
from llama_cpp import Llama

# Download (on first run) and load the quantized Command-R 35B chat model
# from the Hugging Face Hub. This is a large file; loading is slow and
# happens once at import time.
llm = Llama.from_pretrained(
    repo_id="DavidAU/Command-R-01-200xq-Ultra-NEO-V1-35B-IMATRIX-GGUF",
    filename="CommandR-35B-NEO-V1-D_AU-IQ3_XS-0200xq-imat13.gguf",
)
# Function to generate story (model) | |
def generate_story(user_input): | |
response = llm.create_chat_completion( | |
messages=[ | |
{ | |
"role": "user", | |
"content": user_input | |
} | |
] | |
) | |
return response['choices'][0]['message']['content'] | |
# Start storytelling | |
def start_storytelling(): | |
print("Welcome to the Storytelling bot.") | |
story_prompt = input("What would you like your story to be about?") | |
story_part = generate_story(story_prompt) | |
print("\nHere's the beginning of your story:") | |
print(story_part) | |
# Continuation of story (generate part two) | |
while True: | |
continue_story = input("\nDo you want to continue the story? (yes/no): ") | |
if continue_story.lower() == 'yes': | |
user_input = input("\nWhat should happen next?") | |
story_part = generate_story(user_input) | |
print("\nContinued:") | |
print(story_part) | |
else: | |
print("\nEnjoy!") | |
break | |
# Function start | |
start_storytelling() |