Upload 3 files

- app.py (+53 −19)
- generate_horror_plot.py (+47 −8)
- generate_plot.py (+45 −6)
app.py
CHANGED

@@ -1,8 +1,9 @@
 import streamlit as st
 from streamlit_option_menu import option_menu
-from generate_plot import generate_plot, generate_storybook, generate_book_cover
-from generate_horror_plot import generate_horror_plot, generate_horror_storybook
+from generate_plot import generate_plot, generate_storybook, generate_book_cover, set_openai_api_key
+from generate_horror_plot import generate_horror_plot, generate_horror_storybook, set_openai_api_key
 from markup import app_intro, how_use_intro
+from g4f import Provider
 
 
 if "generated_code" not in st.session_state:
@@ -31,8 +32,6 @@ def tab2():
     st.markdown("Create an enchanting children's book with custom characters and illustrations!")
     st.write("Follow the magical instructions below:")
 
-    #openai_api_key = st.text_input("π Enter your OpenAI API key:", type='password')
-
     col1, col2 = st.columns(2)
     with col1:
         st.subheader("Step 1: Enter the Age π§")
@@ -54,13 +53,30 @@ def tab2():
     selected_style = st.selectbox("Select Art Style:", art_styles)
 
     result = None
-
-
-
-
-
-    st.
-    st.
+    st.subheader("Step 5: Choose API π")
+    st.write("Select between G4F free providers or OpenAI. Please note that some G4F providers may not work all time")
+    provider = st.selectbox("Select API:", ["G4F", "OpenAI"])
+
+    if provider == "OpenAI":
+        openai_api_key = st.text_input("π Enter your OpenAI API key:", type='password')
+        if st.button("Generate Plot") and openai_api_key:
+            set_openai_api_key(openai_api_key)
+            with st.spinner('Generating plot...'):
+                result = generate_plot(number_of_pages, character_names, age, selected_style, provider="OpenAI")
+                st.code(result)
+                st.session_state.generated_code = result
+        elif not openai_api_key:
+            st.warning("Please enter your OpenAI API key.")
+    elif provider == "G4F":
+        selected_provider = st.selectbox("Select G4F Provider:", [
+            "Ails", "You", "Aichat", "Bard",
+            "Forefront", "DeepAi", "GetGpt"
+        ])
+        if st.button("Generate Plot"):
+            with st.spinner('Generating plot...'):
+                result = generate_plot(number_of_pages, character_names, age, selected_style, provider="G4F", selected_provider=selected_provider)
+                st.code(result)
+                st.session_state.generated_code = result
 
 
 
@@ -93,7 +109,7 @@ def tab3():
 
     st.subheader("Step 1: Choose the Number of Pages π")
     st.write("Select the number of pages for your bone-chilling horror story (between 2 and 7).")
-    number_of_pages = st.number_input("Enter the Number of Pages:", min_value=2, max_value=7, value=
+    number_of_pages = st.number_input("Enter the Number of Pages:", min_value=2, max_value=7, value=2)
 
     with col2:
 
@@ -103,13 +119,31 @@ def tab3():
     selected_style = st.selectbox("Select Art Style:", art_styles)
 
     result = None
-
-
-
-
-
-    st.
-    st.
+    st.subheader("Step 3: Choose API π")
+    st.write("Select between G4F free providers or OpenAI. Please note that some G4F providers may not work all time")
+    provider = st.selectbox("Select API:", ["G4F", "OpenAI"])
+
+    if provider == "OpenAI":
+        openai_api_key = st.text_input("π Enter your OpenAI API key:", type='password')
+        if st.button("Generate Plot") and openai_api_key:
+            set_openai_api_key(openai_api_key)
+            with st.spinner('Generating plot...'):
+                result = generate_horror_plot(number_of_pages, selected_style, provider="OpenAI")
+                st.code(result)
+                st.session_state.generated_code = result
+        elif not openai_api_key:
+            st.warning("Please enter your OpenAI API key.")
+    elif provider == "G4F":
+        selected_provider = st.selectbox("Select G4F Provider:", [
+            "Ails", "You", "Aichat", "Bard",
+            "Forefront", "DeepAi", "GetGpt"
+        ])
+        if st.button("Generate Plot"):
+
+            with st.spinner('Generating plot...'):
+                result = generate_horror_plot(number_of_pages, selected_style, provider="G4F", selected_provider=selected_provider)
+                st.code(result)
+                st.session_state.generated_horror_code = result
 
     if st.button("Generate Horror Storybook"):
         if not st.session_state.generated_horror_code:
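app.py now imports set_openai_api_key from both plot modules, but its definition is not part of the hunks in this commit. Below is a minimal sketch of what such a helper could look like, assuming the LangChain OpenAI wrapper used by the plot modules reads the key from the OPENAI_API_KEY environment variable; the body is an assumption for illustration, not the committed code.

# Hypothetical sketch of set_openai_api_key (its definition is not shown in this diff).
# Assumes langchain's OpenAI(temperature=0) picks the key up from OPENAI_API_KEY.
import os

def set_openai_api_key(api_key: str) -> None:
    # Make the user-supplied key visible to the OpenAI LLM wrapper.
    os.environ["OPENAI_API_KEY"] = api_key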
generate_horror_plot.py
CHANGED

@@ -29,8 +29,8 @@ Answer:"""
 prompt = PromptTemplate(template=template, input_variables=["number_of_pages"])
 
 def query(payload):
-
-    API_URL = "https://api-inference.huggingface.co/models/stablediffusionapi/all-526-animated"
+    API_URL = "https://api-inference.huggingface.co/models/prompthero/openjourney"
+    #API_URL = "https://api-inference.huggingface.co/models/stablediffusionapi/all-526-animated"
     headers = {"Authorization": "Bearer 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"}
     response = requests.post(API_URL, headers=headers, json=payload)
     return response.content
@@ -41,12 +41,51 @@ def query_alt(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
     return response.content
 
-def generate_horror_plot(number_of_pages, selected_style):
-
-
-
-
-
+def generate_horror_plot(number_of_pages, selected_style, provider, selected_provider=None):
+    if provider == "OpenAI":
+        llm = OpenAI(temperature=0)
+    elif provider == "G4F":
+        if selected_provider == "Ails":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.Ails,
+            )
+        elif selected_provider == "You":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.You,
+            )
+        elif selected_provider == "GetGpt":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.GetGpt,
+            )
+        elif selected_provider == "DeepAi":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.DeepAi,
+            )
+        elif selected_provider == "Forefront":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.Forefront,
+            )
+        elif selected_provider == "Aichat":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.Aichat,
+            )
+        elif selected_provider == "Bard":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.Bard,
+            )
+        # Add other providers here
+        else:
+            raise ValueError("Invalid G4F provider selected.")
+    else:
+        raise ValueError("Invalid provider selected.")
+
     llm_chain = LLMChain(prompt=prompt, llm=llm)
     response = llm_chain.run(number_of_pages=number_of_pages)
     pages = response.split("<Text>")
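The query() helper above now targets the prompthero/openjourney endpoint instead of stablediffusionapi/all-526-animated. As a usage sketch, assuming the standard Hugging Face Inference API contract for text-to-image models (POST a JSON payload whose "inputs" field is the prompt, receive raw image bytes back); render_page_image and its hf_token parameter are illustrative names, not code from this repo.

# Illustrative only: decoding the raw bytes returned by the Inference API into an image.
import io
import requests
from PIL import Image

API_URL = "https://api-inference.huggingface.co/models/prompthero/openjourney"

def render_page_image(prompt: str, hf_token: str) -> Image.Image:
    headers = {"Authorization": f"Bearer {hf_token}"}
    response = requests.post(API_URL, headers=headers, json={"inputs": prompt})
    return Image.open(io.BytesIO(response.content))  # raw bytes -> PIL image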
generate_plot.py
CHANGED

@@ -42,12 +42,51 @@ def query_alt(payload):
     response = requests.post(API_URL, headers=headers, json=payload)
     return response.content
 
-def generate_plot(number_of_pages, character_names, age, selected_style):
-
-
-
-
-
+def generate_plot(number_of_pages, character_names, age, selected_style, provider, selected_provider=None):
+    if provider == "OpenAI":
+        llm = OpenAI(temperature=0)
+    elif provider == "G4F":
+        if selected_provider == "Ails":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.Ails,
+            )
+        elif selected_provider == "You":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.You,
+            )
+        elif selected_provider == "GetGpt":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.GetGpt,
+            )
+        elif selected_provider == "DeepAi":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.DeepAi,
+            )
+        elif selected_provider == "Forefront":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.Forefront,
+            )
+        elif selected_provider == "Aichat":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.Aichat,
+            )
+        elif selected_provider == "Bard":
+            llm = G4FLLM(
+                model=Model.gpt_35_turbo,
+                provider=Provider.Bard,
+            )
+        # Add other providers here
+        else:
+            raise ValueError("Invalid G4F provider selected.")
+    else:
+        raise ValueError("Invalid provider selected.")
+
     llm_chain = LLMChain(prompt=prompt, llm=llm)
     response = llm_chain.run(number_of_pages=number_of_pages, character_names=character_names, age=age)
     pages = response.split("<Text>")
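Both generate_plot and generate_horror_plot now repeat the same seven-branch if/elif ladder to map a provider name onto a g4f Provider attribute. A compact alternative sketch, assuming the Model, Provider, and G4FLLM imports these modules already rely on; build_g4f_llm and SUPPORTED_G4F_PROVIDERS are hypothetical names, not part of this commit.

# Sketch only: the selectbox strings match attribute names on g4f.Provider,
# so getattr can stand in for the if/elif chain.
from g4f import Model, Provider
from langchain_g4f import G4FLLM

SUPPORTED_G4F_PROVIDERS = {"Ails", "You", "Aichat", "Bard", "Forefront", "DeepAi", "GetGpt"}

def build_g4f_llm(selected_provider: str) -> G4FLLM:
    if selected_provider not in SUPPORTED_G4F_PROVIDERS:
        raise ValueError("Invalid G4F provider selected.")
    return G4FLLM(
        model=Model.gpt_35_turbo,
        provider=getattr(Provider, selected_provider),  # e.g. Provider.Ails
    )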