Update app.py
app.py CHANGED
@@ -1,7 +1,6 @@
 import subprocess
 subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 subprocess.run("mkdir -p ./checkpoints", shell=True)
-subprocess.run("huggingface-cli download --resume-download Alpha-VLLM/Lumina-Next-T2I --local-dir ./checkpoints --local-dir-use-symlinks False", shell=True)
 
 import argparse
 import builtins
@@ -152,6 +151,8 @@ def load_model(args, master_port, rank):
     assert train_args.model_parallel_size == args.num_gpus
     if args.ema:
         print("Loading ema model.")
+
+    subprocess.run("huggingface-cli download --resume-download Alpha-VLLM/Lumina-Next-T2I --local-dir ./checkpoints --local-dir-use-symlinks False", shell=True)
     ckpt = torch.load(
         os.path.join(
             args.ckpt,
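
The checkpoint download that previously ran at import time now runs inside load_model, just before the checkpoint is read. The shelled-out huggingface-cli call could also be written in-process; the sketch below is a rough equivalent, assuming the huggingface_hub package (which provides that CLI) is importable — it is illustrative only and not what app.py actually does.

# Rough in-process equivalent of the shelled-out huggingface-cli call above.
# Assumes huggingface_hub is available; illustrative sketch, not app.py's code.
from huggingface_hub import snapshot_download

snapshot_download(
    repo_id="Alpha-VLLM/Lumina-Next-T2I",
    local_dir="./checkpoints",
    local_dir_use_symlinks=False,  # copy real files instead of symlinking the cache
    resume_download=True,          # mirrors --resume-download
)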
@@ -439,7 +440,7 @@ def main():
     # barrier = Barrier(args.num_gpus + 1)
     for i in range(args.num_gpus):
         text_encoder, tokenizer, vae, model = load_model(args, master_port, i)
-
+        request_queues.append(Queue())
         generation_kwargs = dict(
             args=args,
             master_port=master_port,
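
The other addition appends a fresh Queue for each GPU before generation_kwargs is built, so every worker gets its own request channel. A minimal sketch of that one-queue-per-GPU fan-out follows; the worker function and its arguments are hypothetical, since the rest of main() is not shown in this diff.

# Minimal sketch of a one-queue-per-GPU fan-out, with hypothetical names;
# the actual worker target and its arguments in app.py are not part of this diff.
from multiprocessing import Process, Queue

def worker(rank, request_queue):
    while True:
        request = request_queue.get()   # block until a request is enqueued
        if request is None:             # sentinel: shut this worker down
            break
        # ... run generation for `request` on GPU `rank` ...

def launch(num_gpus):
    request_queues = []
    for rank in range(num_gpus):
        request_queues.append(Queue())  # one request queue per GPU worker
        Process(target=worker, args=(rank, request_queues[rank]), daemon=True).start()
    return request_queues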