Renaming
- app.py +3 -3
- f_lite/__init__.py +5 -0
- {pikigen → f_lite}/model.py +0 -0
- {pikigen → f_lite}/pipeline.py +5 -5
- pikigen/__init__.py +0 -5
app.py
CHANGED
@@ -8,11 +8,11 @@ import google.generativeai as genai
 
 import spaces
 import torch
-from pikigen import PikigenPipeline
+from f_lite import FLitePipeline
 
 # Trick required because it is not a native diffusers model
 from diffusers.pipelines.pipeline_loading_utils import LOADABLE_CLASSES, ALL_IMPORTABLE_CLASSES
-LOADABLE_CLASSES["pikigen"] = LOADABLE_CLASSES["pikigen.model"] = {"DiT": ["save_pretrained", "from_pretrained"]}
+LOADABLE_CLASSES["f_lite"] = LOADABLE_CLASSES["f_lite.model"] = {"DiT": ["save_pretrained", "from_pretrained"]}
 ALL_IMPORTABLE_CLASSES["DiT"] = ["save_pretrained", "from_pretrained"]
 
 load_dotenv()
@@ -33,7 +33,7 @@ if torch.cuda.is_available():
 else:
     torch_dtype = torch.float32
 
-pipe = PikigenPipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
+pipe = FLitePipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
 # pipe.enable_model_cpu_offload() # For less memory consumption
 pipe.to(device)
 pipe.vae.enable_slicing()
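The two hunks above do the actual wiring: the LOADABLE_CLASSES / ALL_IMPORTABLE_CLASSES entries teach the generic diffusers loader how to save and restore the custom DiT class (f_lite is not a native diffusers module), and the pipeline is then loaded through FLitePipeline.from_pretrained. A minimal standalone sketch of the same pattern, assuming a placeholder repo id and bfloat16 on GPU (neither is spelled out in this diff):

import torch
from diffusers.pipelines.pipeline_loading_utils import LOADABLE_CLASSES, ALL_IMPORTABLE_CLASSES

from f_lite import FLitePipeline

# Register the non-native DiT class so the diffusers loader knows how to
# save_pretrained / from_pretrained that component of the pipeline.
LOADABLE_CLASSES["f_lite"] = LOADABLE_CLASSES["f_lite.model"] = {"DiT": ["save_pretrained", "from_pretrained"]}
ALL_IMPORTABLE_CLASSES["DiT"] = ["save_pretrained", "from_pretrained"]

device = "cuda" if torch.cuda.is_available() else "cpu"
torch_dtype = torch.bfloat16 if device == "cuda" else torch.float32  # GPU dtype is an assumption

model_repo_id = "your-org/your-f-lite-checkpoint"  # placeholder, not from this commit
pipe = FLitePipeline.from_pretrained(model_repo_id, torch_dtype=torch_dtype)
# pipe.enable_model_cpu_offload()  # optional, trades speed for lower memory use
pipe.to(device)
pipe.vae.enable_slicing()  # decode the VAE in slices to reduce peak memory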
f_lite/__init__.py
ADDED
@@ -0,0 +1,5 @@
+from .pipeline import FLitePipeline, FLitePipelineOutput, APGConfig
+from .model import DiT
+
+
+__all__ = ["FLitePipeline", "FLitePipelineOutput", "APGConfig", "DiT"]
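With these re-exports in place, downstream code (such as app.py above) can pull everything it needs from the package root instead of the individual submodules. A small sketch, using only the names the new __init__.py exports:

# Package-root imports enabled by f_lite/__init__.py
from f_lite import FLitePipeline, FLitePipelineOutput, APGConfig, DiT

# The submodule paths remain valid; the root-level names are just aliases.
from f_lite.pipeline import FLitePipeline as PipelineViaSubmodule
from f_lite.model import DiT as DiTViaSubmodule

assert FLitePipeline is PipelineViaSubmodule
assert DiT is DiTViaSubmodule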
{pikigen → f_lite}/model.py
RENAMED
File without changes
{pikigen → f_lite}/pipeline.py
RENAMED
@@ -27,9 +27,9 @@ class APGConfig:
 
 
 @dataclass
-class PikigenPipelineOutput(BaseOutput):
+class FLitePipelineOutput(BaseOutput):
     """
-    Output class for PikigenPipeline pipeline.
+    Output class for FLitePipeline pipeline.
     Args:
         images (`List[PIL.Image.Image]` or `np.ndarray`)
             List of denoised PIL images of length `batch_size` or numpy array of shape `(batch_size, height, width,
@@ -39,9 +39,9 @@ class PikigenPipelineOutput(BaseOutput):
     images: Union[List[Image.Image], np.ndarray]
 
 
-class PikigenPipeline(DiffusionPipeline):
+class FLitePipeline(DiffusionPipeline):
     r"""
-    Pipeline for text-to-image generation using Pikigen model.
+    Pipeline for text-to-image generation using FLite model.
     This model inherits from [`DiffusionPipeline`].
     """
 
@@ -289,7 +289,7 @@ class PikigenPipeline(DiffusionPipeline):
         images = (images * 255).round().clamp(0, 255).to(torch.uint8).cpu()
         pil_images = [Image.fromarray(img.permute(1, 2, 0).numpy()) for img in images]
 
-        return PikigenPipelineOutput(
+        return FLitePipelineOutput(
             images=pil_images,
         )
 
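Because FLitePipelineOutput subclasses diffusers' BaseOutput, callers of the renamed pipeline get the generated images back as a dataclass-like object. A hedged usage sketch; the pipeline's full __call__ signature is not shown in this diff, so the prompt keyword is an assumption based on the usual DiffusionPipeline convention:

# `pipe` is the FLitePipeline instance constructed as in the app.py sketch above.
# Hypothetical call; only the return type (FLitePipelineOutput carrying an
# `images` list of PIL images) is established by the diff.
output = pipe(prompt="a watercolor fox in a forest")

for i, image in enumerate(output.images):
    image.save(f"sample_{i}.png")

# BaseOutput also supports dict-style access to the same field:
same_images = output["images"]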
pikigen/__init__.py
DELETED
@@ -1,5 +0,0 @@
-from .pipeline import PikigenPipeline, PikigenPipelineOutput, APGConfig
-from .model import DiT
-
-
-__all__ = ["PikigenPipeline", "PikigenPipelineOutput", "APGConfig", "DiT"]