Use temp file for the result of api request
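For context, a minimal sketch of the pattern this commit adopts: stream the HTTP response into a tempfile.TemporaryFile, rewind it, and hand the open handle straight to PIL instead of writing a named file to disk first. The helper name fetch_image and the URL below are illustrative placeholders, not code from this repo.

import tempfile

import requests
from PIL import Image


def fetch_image(url: str):
    # Stream the response body into an anonymous temp file and return the open handle.
    with requests.get(url, stream=True) as r:
        r.raise_for_status()
        f = tempfile.TemporaryFile()
        for chunk in r.iter_content(chunk_size=8192):
            f.write(chunk)
        f.seek(0)  # rewind so the caller reads from the start
        return f


if __name__ == "__main__":
    # PIL accepts file-like objects, so no named file is left in the working directory.
    handle = fetch_image("https://example.com/image.png")  # placeholder URL
    print(Image.open(handle).size)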
app.py CHANGED
@@ -26,9 +26,8 @@ def infer(prompt):
     # if re.search(rf"\b{filter}\b", prompt):
     #     raise gr.Error("Unsafe content found. Please try again with different prompts.")
     #
-
-    image = text_to_image(prompt)
-    image = np.array(Image.open(image).convert('RGB'))
+    image_file = text_to_image(prompt)
+    image = np.array(Image.open(image_file).convert('RGB'))
     images = [image]
     return images

fair.py CHANGED
@@ -1,11 +1,12 @@
 import json
 import os
 import time
-from typing import List
+from typing import List, BinaryIO
 import logging
 logger = logging.getLogger()

 import requests
+import tempfile

 SERVER_ADRESS="https://faircompute.com:8000/api/v1"
 #SERVER_ADRESS="http://localhost:8000/api/v1"
@@ -99,7 +100,7 @@ def put_job_stream_eof(token, job_id, name):
     return response.text


-def wait_for_file(token, job_id, path, local_path, attempts=10):
+def wait_for_file(token, job_id, path, attempts=10) -> BinaryIO:
     headers = {
         'Authorization': f'Bearer {token}'
     }
@@ -109,17 +110,18 @@ def wait_for_file(token, job_id, path, local_path, attempts=10):
         try:
             with requests.get(url=url, headers=headers, stream=True) as r:
                 r.raise_for_status()
-                with open(local_path, 'wb') as f:
-                    for chunk in r.iter_content(chunk_size=8192):
-                        f.write(chunk)

-                print(f"File {local_path} ready")
-                return local_path
+                f = tempfile.TemporaryFile()
+                for chunk in r.iter_content(chunk_size=8192):
+                    f.write(chunk)

+                print(f"File {path} ready")
+                f.seek(0, 0)
+                return f
         except Exception as e:
             print(e)
             time.sleep(0.5)

-    print(f"Failed to receive {local_path}")
+    print(f"Failed to receive {path}")


 def text_to_image(text):
@@ -180,75 +182,16 @@ def text_to_image(text):
         logger.info("Done!")
     else:
         logger.info("Job Failed")
-    resp = wait_for_file(token=token,
+    file = wait_for_file(token=token,
                          job_id=job_id,
-                         path="%2Fworkspace%2Fresult.png",
-                         local_path="result.png")
-    logger.info(resp)
-    return resp
+                         path="%2Fworkspace%2Fresult.png")
+    return file


 if __name__=="__main__":
-
-
-
-
-    print(token)
-
-    summary = get_cluster_summary(token=token)
-    print("Summary:")
-    print(summary)
-    program_id = put_program(token=token,
-                             launcher="Docker",
-                             image=DOCKER_IMAGE,
-                             runtime="nvidia",
-                             command=[])
-    print(program_id)
-
-    job_id = put_job(token=token,
-                     program_id=program_id,
-                     input_files=[],
-                     output_files=["/workspace/result.png"])
-
-    print(job_id)
+    from PIL import Image
+    file = text_to_image(text="Robot dinozaur\n")
+    image = Image.open(file)
+    image.save("result.png")

-    status = get_job_status(token=token,
-                            job_id=job_id)
-    print(status)
-
-    while status != "Processing" and status != "Completed":
-        status = get_job_status(token=token,
-                                job_id=job_id)
-        print(status)
-        time.sleep(0.5)
-
-    res = put_job_stream_data(token=token,
-                              job_id=job_id,
-                              name="stdin",
-                              data="Robot dinozaur\n")
-    print(res)
-
-    res = put_job_stream_eof(token=token,
-                             job_id=job_id,
-                             name="stdin")
-    print(res)
-
-    status = get_job_status(token=token,
-                            job_id=job_id)
-    print(status)
-
-    while status == "Processing":
-        status = get_job_status(token=token,
-                                job_id=job_id)
-        print(status)
-        time.sleep(0.5)
-    if status == "Completed":
-        print("Done!")
-    else:
-        print("Job Failed")
-    resp = wait_for_file(token=token,
-                         job_id=job_id,
-                         path="%2Fworkspace%2Fresult.png",
-                         local_path="result.png")
-    print(resp)

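A caller-side note on the new return type (an assumption-level sketch, not part of the diff): tempfile.TemporaryFile() is deleted by the OS once the handle is closed, so consumers should copy the pixel data out, as app.py does with np.array, before dropping the handle. Assuming fair.py is importable next to app.py in this Space:

import numpy as np
from PIL import Image

from fair import text_to_image  # assumption: fair.py sits alongside app.py

file = text_to_image("Robot dinozaur\n")
try:
    # np.array forces PIL to read the pixels, so the temp file can be closed afterwards.
    image = np.array(Image.open(file).convert('RGB'))
finally:
    file.close()  # the anonymous temp file is removed as soon as it is closed
print(image.shape)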