Update app.py
app.py CHANGED
@@ -4,25 +4,10 @@ import requests
 import time
 import os
 import re
-
-hf_token = os.environ.get("HF_TOKEN")
 from gradio_client import Client
 
 is_shared_ui = True if "fffiloni/consistent-character" in os.environ['SPACE_ID'] else False
 
-
-def safety_check(user_prompt):
-
-    client = Client("fffiloni/safety-checker-bot", hf_token=hf_token)
-    response = client.predict(
-        source_space="consistent-character space",
-        user_prompt=user_prompt,
-        api_name="/infer"
-    )
-    print(response)
-
-    return response
-
 from utils.gradio_helpers import parse_outputs, process_outputs
 
 names = ['prompt', 'negative_prompt', 'subject', 'number_of_outputs', 'number_of_images_per_pose', 'randomise_poses', 'output_format', 'output_quality', 'seed']
@@ -37,6 +22,20 @@ def predict(request: gr.Request, *args, progress=gr.Progress(track_tqdm=True)):
 
     try:
         if is_shared_ui:
+            hf_token = os.environ.get("HF_TOKEN")
+
+            def safety_check(user_prompt):
+
+                client = Client("fffiloni/safety-checker-bot", hf_token=hf_token)
+                response = client.predict(
+                    source_space="consistent-character space",
+                    user_prompt=user_prompt,
+                    api_name="/infer"
+                )
+                print(response)
+
+                return response
+
             is_safe = safety_check(args[0])
             print(is_safe)
 
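For context, the change moves the HF_TOKEN lookup and the safety_check helper from module level into the shared-UI branch of predict, where the prompt is checked against the fffiloni/safety-checker-bot Space through gradio_client before anything runs. A minimal standalone sketch of that call pattern is below; the __main__ block and the example prompt are illustrative placeholders, not part of this commit, and the script assumes an HF_TOKEN with access to the checker Space is set in the environment.

# Sketch of the safety-check call that the commit places inside predict(),
# pulled out so it can be exercised on its own (assumes HF_TOKEN is set).
import os
from gradio_client import Client

hf_token = os.environ.get("HF_TOKEN")

def safety_check(user_prompt):
    # Query the moderation bot Space, identifying the calling Space,
    # and return its raw response.
    client = Client("fffiloni/safety-checker-bot", hf_token=hf_token)
    response = client.predict(
        source_space="consistent-character space",
        user_prompt=user_prompt,
        api_name="/infer"
    )
    return response

if __name__ == "__main__":
    # Placeholder prompt for illustration only.
    print(safety_check("a watercolor portrait of an astronaut"))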