import argparse
import subprocess
import sys

import torch
import spaces
from transformers import AutoModelForCausalLM, AutoTokenizer
def main() -> None:
    """Parse launch options and run ``tools/webui.py`` in a child process.

    Command-line flags (all optional; defaults target the fish-speech 1.4
    checkpoints):
        --llama-checkpoint-path: path to the Llama (text2semantic) checkpoint
        --decoder-checkpoint-path: path to the GAN vocoder/decoder weights
        --decoder-config-name: decoder config name passed through to the WebUI
        --device: device string forwarded to the WebUI (default "cpu")

    Raises:
        SystemExit: with the child's return code when the WebUI process
            exits nonzero, so failures are not silently swallowed.
    """
    # NOTE(review): the original decorated this with @torch.no_grad(), but no
    # tensor operation happens here (autograd state does not cross process
    # boundaries), so the decorator was a misleading no-op and was dropped.
    parser = argparse.ArgumentParser(description="启动 WebUI")
    parser.add_argument(
        "--llama-checkpoint-path",
        type=str,
        default="checkpoints/fish-speech-1.4-sft-yth-lora",
        help="Llama 检查点路径",
    )
    parser.add_argument(
        "--decoder-checkpoint-path",
        type=str,
        default="checkpoints/fish-speech-1.4/firefly-gan-vq-fsq-8x1024-21hz-generator.pth",
        help="解码器检查点路径",
    )
    parser.add_argument(
        "--decoder-config-name",
        type=str,
        default="firefly_gan_vq",
        help="解码器配置名称",
    )
    parser.add_argument(
        "--device",
        type=str,
        default="cpu",
        help="设备类型",
    )

    args = parser.parse_args()

    # Use the interpreter running this launcher rather than whatever "python"
    # happens to resolve to on PATH, so the child sees the same venv/env.
    completed = subprocess.run([
        sys.executable,
        "tools/webui.py",
        "--llama-checkpoint-path", args.llama_checkpoint_path,
        "--decoder-checkpoint-path", args.decoder_checkpoint_path,
        "--decoder-config-name", args.decoder_config_name,
        "--device", args.device,
    ])
    if completed.returncode != 0:
        # Propagate the child's failure instead of exiting 0 silently.
        raise SystemExit(completed.returncode)
# Script entry point: only launch the WebUI when executed directly,
# not when this module is imported.
if __name__ == "__main__":
    main()