ChefEase committed on
Commit
3011bdf
verified
1 Parent(s): 5a6c002

Update app.py

Files changed (1)
  1. app.py +4 -0
app.py CHANGED
@@ -2,9 +2,11 @@ import torch
 from PIL import Image
 import gradio as gr
 from huggingface_hub import hf_hub_download
+from huggingface_hub import HfFolder
 
 from src_inference.pipeline import FluxPipeline
 from src_inference.lora_helper import set_single_lora, clear_cache
+import os
 
 device = "cuda" if torch.cuda.is_available() else "cpu"
 print("Running on:", device)
@@ -47,8 +49,10 @@ lora_path = hf_hub_download(
     local_dir="./LoRAs"
 )
 
+token = os.environ.get("HF_TOKEN")
 pipe = FluxPipeline.from_pretrained(
     "black-forest-labs/FLUX.1-dev",
+    use_auth_token=token,
     torch_dtype=torch.bfloat16 if torch.cuda.is_available() else torch.float32
 ).to(device)
 
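The added lines read a Hugging Face access token from the HF_TOKEN environment variable and pass it to FluxPipeline.from_pretrained so the gated black-forest-labs/FLUX.1-dev weights can be downloaded. A minimal sketch of how the token would be checked at startup, assuming it is supplied as a Space secret or exported in the shell (this check and its error message are illustrative, not part of the commit):

import os

# Hypothetical pre-flight check: fail early with a clear message if no token
# is available, since FLUX.1-dev is a gated repository on the Hub.
token = os.environ.get("HF_TOKEN")
if token is None:
    raise RuntimeError(
        "HF_TOKEN is not set. Export an access token (or add it as a Space secret) "
        "before loading black-forest-labs/FLUX.1-dev."
    )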