pipeline_stable_diffusion_3_ipa.py
CHANGED
@@ -862,7 +862,7 @@ class StableDiffusion3Pipeline(DiffusionPipeline, SD3LoraLoaderMixin, FromSingle
         return self._interrupt


-    @torch.
+    @torch.no_grad()
     def init_ipadapter(self, ip_adapter_path, image_encoder_path, nb_token, output_dim=2432):
         from transformers import SiglipVisionModel, SiglipImageProcessor
         state_dict = torch.load(ip_adapter_path, map_location="cpu")

@@ -920,7 +920,7 @@ class StableDiffusion3Pipeline(DiffusionPipeline, SD3LoraLoaderMixin, FromSingle
         print(f"=> loading ip_adapter: {key_name}")


-    @torch.
+    @torch.no_grad()
     def encode_clip_image_emb(self, clip_image, device, dtype):

         # clip
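The only functional change in both hunks is the decorator: init_ipadapter and encode_clip_image_emb now run under @torch.no_grad(), so neither IP-Adapter weight loading nor reference-image encoding builds an autograd graph. Below is a minimal, self-contained sketch of that effect; the Encoder class and its encode method are illustrative stand-ins, not code from this Space.

import torch


class Encoder(torch.nn.Module):
    # Illustrative stand-in for the pipeline's image encoder (assumption, not this Space's code).
    def __init__(self):
        super().__init__()
        self.proj = torch.nn.Linear(8, 4)

    @torch.no_grad()
    def encode(self, x):
        # Mirrors the decorated encode_clip_image_emb: the forward pass runs with
        # gradient tracking disabled, so no computation graph is stored.
        return self.proj(x)


enc = Encoder()
out = enc.encode(torch.randn(2, 8))
print(out.requires_grad)  # False: the output is detached from autograd

For inference-only helpers like these, running under no-grad mode avoids keeping activations for a backward pass, which lowers memory use during image-prompt encoding.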