NotSamy committed on
Commit
97ee274
1 Parent(s): fad247f

First attempt

Browse files
Files changed (2) hide show
  1. app.py +47 -0
  2. requirement.txt +2 -0
app.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Third-party dependencies: Streamlit for the web UI and llama-cpp-python
# for running the quantized GGUF model locally.
import streamlit as st  # FIX: was "import streamlist as st" — no such package; "streamlit" is intended
from llama_cpp import Llama

# Load the model.
# NOTE(review): Llama.from_pretrained downloads the GGUF weights from the
# Hugging Face Hub at import time — a multi-gigabyte, blocking operation.
# Confirm this is acceptable for the deployment target.
llm = Llama.from_pretrained(
    repo_id="DavidAU/Command-R-01-200xq-Ultra-NEO-V1-35B-IMATRIX-GGUF",
    filename="CommandR-35B-NEO-V1-D_AU-IQ3_XS-0200xq-imat13.gguf",
)
9
+
10
# Ask the model for a piece of the story.
def generate_story(user_input):
    """Send *user_input* to the loaded model as a single user message.

    Returns the text content of the first completion choice.
    """
    chat_reply = llm.create_chat_completion(
        messages=[{"role": "user", "content": user_input}]
    )
    first_choice = chat_reply['choices'][0]
    return first_choice['message']['content']
21
+
22
# Interactive console storytelling session.
def start_storytelling():
    """Run a console loop that builds a story piece by piece.

    Prompts the user for an opening idea, prints the generated beginning,
    then keeps asking whether to continue until the user declines.
    """
    print("Welcome to the Storytelling bot.")

    opening_idea = input("What would you like your story to be about?")

    opening = generate_story(opening_idea)
    print("\nHere's the beginning of your story:")
    print(opening)

    # Extend the story until the user answers anything other than "yes".
    while True:
        answer = input("\nDo you want to continue the story? (yes/no): ")

        if answer.lower() != 'yes':
            print("\nEnjoy!")
            break

        next_idea = input("\nWhat should happen next?")
        continuation = generate_story(next_idea)
        print("\nContinued:")
        print(continuation)
45
+
46
# Entry point: run the interactive loop only when executed as a script,
# not when this module is imported (FIX: bare top-level call would also
# fire on import, which breaks reuse and tooling).
if __name__ == "__main__":
    start_storytelling()
requirement.txt ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ transformers
2
+ llama-cpp-python