1inkusFace committed (verified)
Commit e6b1d9d · 1 Parent(s): 119381d

Update app.py

Files changed (1)
  1. app.py +6 -6
app.py CHANGED
@@ -185,8 +185,8 @@ def generate_30(
 ):
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
-    #pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
-    pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
+    pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
+    #pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
     options = {
         "prompt": [prompt],
         "negative_prompt": [negative_prompt],
@@ -227,8 +227,8 @@ def generate_60(
 ):
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
-    #pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
-    pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
+    pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
+    #pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
     options = {
         "prompt": [prompt],
         "negative_prompt": [negative_prompt],
@@ -269,8 +269,8 @@ def generate_90(
 ):
     seed = random.randint(0, MAX_SEED)
     generator = torch.Generator(device='cuda').manual_seed(seed)
-    #pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
-    pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
+    pipe.text_encoder=text_encoder.to(device=device, dtype=torch.bfloat16)
+    #pipe.text_encoder_2=text_encoder_2.to(device=device, dtype=torch.bfloat16)
     options = {
         "prompt": [prompt],
         "negative_prompt": [negative_prompt],