Update app.py
app.py CHANGED

@@ -33,14 +33,19 @@ def generate_story(text):
     prompt = PromptTemplate(
         input_variables=["text"],
         template="""
-
+        You are a fun and seasoned storyteller.
+        Generate a short story for a 5 years old audience about {text}.
         """
     )
     story = LLMChain(llm=llm, prompt=prompt)
     story_result = story.run(text=text)
-
+    print(story_result)
+    print("""
+    —
+    Cutting text in chunks
+    —
+    """)
     max_length = 250
-
     text_chunks = split_text(large_text, max_length)
     for chunk in text_chunks:
         print(chunk)
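For context, this hunk calls split_text(large_text, max_length), but neither the helper nor large_text is defined in the lines shown here. Below is a minimal sketch of what such a character-budget chunker could look like; it is an illustrative assumption, not the split_text actually defined in app.py.

# Hypothetical sketch only: the real split_text in app.py is not shown in this diff.
def split_text(text: str, max_length: int) -> list[str]:
    """Cut text into chunks of at most max_length characters,
    preferring to break on whitespace."""
    chunks = []
    text = text.strip()
    while text:
        if len(text) <= max_length:
            chunks.append(text)
            break
        # Break at the last space inside the window, or hard-cut if there is none.
        cut = text.rfind(" ", 0, max_length)
        if cut <= 0:
            cut = max_length
        chunks.append(text[:cut].rstrip())
        text = text[cut:].lstrip()
    return chunks

# large_text is assumed here; in app.py it comes from elsewhere.
large_text = "Once upon a time there was a curious little fox. " * 20
for chunk in split_text(large_text, 250):
    print(chunk)

Breaking at whitespace keeps each printed chunk readable at the cost of slightly uneven chunk sizes; a hard cut at max_length is only used when a window contains no space at all.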