Spaces:
Running
Running
Inicio
Browse files- .gitignore +3 -0
- Dockerfile +11 -0
- LICENSE +21 -0
- app.py +32 -0
- enlaces.py +67 -0
- funciones.py +30 -0
- requirements.txt +6 -0
- tester.py +3 -0
.gitignore
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
/venv/
|
2 |
+
/__pycache__/
|
3 |
+
bridges.py
|
Dockerfile
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Container image for the FastAPI dish-image service.
FROM python:3.13

WORKDIR /code

# Copy requirements first so dependency installation is cached
# independently of application-code changes.
COPY ./requirements.txt /code/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

COPY . .

# Serve the FastAPI app on port 7860 (the HF Spaces convention).
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
|
LICENSE
ADDED
@@ -0,0 +1,21 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
MIT License
|
2 |
+
|
3 |
+
Copyright (c) 2025 Moibe
|
4 |
+
|
5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
6 |
+
of this software and associated documentation files (the "Software"), to deal
|
7 |
+
in the Software without restriction, including without limitation the rights
|
8 |
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
9 |
+
copies of the Software, and to permit persons to whom the Software is
|
10 |
+
furnished to do so, subject to the following conditions:
|
11 |
+
|
12 |
+
The above copyright notice and this permission notice shall be included in all
|
13 |
+
copies or substantial portions of the Software.
|
14 |
+
|
15 |
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
16 |
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
17 |
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
18 |
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
19 |
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
20 |
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
21 |
+
SOFTWARE.
|
app.py
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from io import BytesIO

from fastapi import FastAPI, File, Form, UploadFile
from fastapi.responses import StreamingResponse

import funciones

app = FastAPI()


@app.post("/echo-image/")
async def echo_image(image: UploadFile = File(...)):
    """Echo the uploaded image back unchanged (round-trip sanity check).

    Returns an error payload when the upload is not an image.
    """
    if not image.content_type.startswith("image/"):
        return {"error": "El archivo no es una imagen"}

    contents = await image.read()
    return StreamingResponse(BytesIO(contents), media_type=image.content_type)


@app.post("/get-platillo/")
async def get_platillo_image(prompt: str = Form(...)):
    """Generate a dish image from *prompt* and stream it back as PNG."""
    imagen_pil = funciones.genera_platillo(prompt)

    # genera_platillo swallows generation errors and yields None; surface
    # that as an explicit error payload instead of crashing on .save(None).
    if imagen_pil is None:
        return {"error": "No se pudo generar la imagen"}

    img_io = BytesIO()
    imagen_pil.save(img_io, "PNG")
    img_io.seek(0)

    # NOTE(review): decide whether to force a download disposition or let the
    # client decide (likely irrelevant when the API is consumed directly):
    # return StreamingResponse(content=img_io, media_type="image/png",
    #     headers={"Content-Disposition": "attachment; filename=platillo.png"})
    return StreamingResponse(content=img_io, media_type="image/png")
|
enlaces.py
ADDED
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Sample dish descriptions used as generation prompts.
menu = [
    "Ensalada de atún con jitomate y queso",
    "Tuna and tomato salad with cheese",
    "Sopa de almeja con guisantes",
    "Pollo a la plancha con arroz y ensalada de lechuga",
    # FIX: a missing trailing comma here caused implicit string concatenation,
    # merging this entry with the next one into a single 5th menu item.
    "Tofu sobre rebanada de pan blanco",
    "Filete de carne con papas y ensalada verde"
]

# (label, value) pairs of HF Spaces that can generate images.
espacios = [
    ("stabilityai/stable-diffusion", "stabilityai/stable-diffusion"),
    ("stabilityai/stable-diffusion-3-medium", "stabilityai/stable-diffusion-3-medium"),
    # ("stabilityai/stable-diffusion-3.5-large", "stabilityai/stable-diffusion-3.5-large"),
    ("black-forest-labs/FLUX.1-schnell", "black-forest-labs/FLUX.1-schnell"),
    ("black-forest-labs/FLUX.1-dev", "black-forest-labs/FLUX.1-dev")
]

# (label, value) pairs of text-to-image model checkpoints.
modelos = [
    ("stabilityai/stable-diffusion-xl-base-1.0", "stabilityai/stable-diffusion-xl-base-1.0"),
    ("stabilityai/stable-diffusion-2", "stabilityai/stable-diffusion-2"),
    ("stabilityai/stable-diffusion-2-1", "stabilityai/stable-diffusion-2-1"),
    ("stabilityai/stable-diffusion-3-medium", "stabilityai/stable-diffusion-3-medium"),
    ("stabilityai/stable-diffusion-3.5-large", "stabilityai/stable-diffusion-3.5-large"),
    ("black-forest-labs/FLUX.1-dev", "black-forest-labs/FLUX.1-dev"),
    ("black-forest-labs/FLUX.1-schnell", "black-forest-labs/FLUX.1-schnell")
]

# Inference providers: the default choice, the full list, and the subsets
# known to support each model family.
proveedores_default = [
    ("HF Inference", "hf-inference"),
]

proveedores_todos = [
    ("HF Inference", "hf-inference"),
    ("Together AI", "together"),
    ("Fal AI", "fal-ai"),
    ("Replicate", "replicate"),
    ("Nebius", "nebius")
]

proveedores_sd1 = [
    ("HF Inference", "hf-inference"),
    ("Together AI", "together"),
    ("Replicate", "replicate"),
    ("Nebius", "nebius")
]

proveedores_sd2 = [
    ("HF Inference", "hf-inference"),
]

proveedores_sd3_medium = [
    ("Fal AI", "fal-ai"),
]

proveedores_sd3_large = [
    ("HF Inference", "hf-inference"),
    ("Fal AI", "fal-ai"),
    ("Replicate", "replicate")
]

proveedores_flux = [
    ("HF Inference", "hf-inference"),
    ("Together AI", "together"),
    ("Fal AI", "fal-ai"),
    ("Replicate", "replicate"),
    ("Nebius", "nebius")
]
|
funciones.py
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import bridges

from huggingface_hub import InferenceClient
from PIL import Image


def genera_platillo(prompt):
    """Generate a dish image for *prompt* via the HF Inference API.

    Returns a PIL image on success, or None when generation fails;
    the exception is logged, so callers must handle the None case.
    """
    enlace = "black-forest-labs/FLUX.1-dev"
    proveedor = "hf-inference"

    client = InferenceClient(
        provider=proveedor,
        api_key=bridges.hug,
    )

    try:
        image = client.text_to_image(
            prompt,
            model=enlace,
            # seed=42,              # default varies; a fixed seed makes output deterministic.
            # guidance_scale=7.5,
            # num_inference_steps=50,
            # width=1024,           # default is 1024x1024; max is 1536.
            # height=1024,          # Replicate's limit is 1024.
        )
        return image
    except Exception as e:
        print("Excepción es: ", e)
        # FIX: make the failure result explicit instead of falling off the
        # end of the function and returning None implicitly.
        return None
|
requirements.txt
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
fastapi[standard]
|
2 |
+
|
3 |
+
huggingface_hub
|
4 |
+
|
5 |
+
Pillow
|
tester.py
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
import funciones

# FIX: genera_platillo requires a prompt argument; calling it with no
# arguments raised TypeError before any work could happen.
funciones.genera_platillo("Ensalada de atún con jitomate y queso")
|