# config.json — deleted in this commit (-47 lines); original TOML contents reconstructed below.
|
|
1 |
-
[network_arguments]
|
2 |
-
unet_lr = 0.75
|
3 |
-
text_encoder_lr = 0.75
|
4 |
-
network_dim = 32
|
5 |
-
network_alpha = 32
|
6 |
-
network_module = "networks.lora"
|
7 |
-
network_args = [ "conv_dim=32", "conv_alpha=16",]
|
8 |
-
network_train_unet_only = false
|
9 |
-
|
10 |
-
[optimizer_arguments]
|
11 |
-
learning_rate = 0.75
|
12 |
-
lr_scheduler = "cosine_with_restarts"
|
13 |
-
lr_scheduler_num_cycles = 5
|
14 |
-
lr_warmup_steps = 34
|
15 |
-
optimizer_type = "Prodigy"
|
16 |
-
optimizer_args = [ "weight_decay=0.1", "betas=[0.9,0.99]",]
|
17 |
-
|
18 |
-
[training_arguments]
|
19 |
-
pretrained_model_name_or_path = "sd_xl_base_1.0.safetensors"
|
20 |
-
vae = "sdxl_vae.safetensors"
|
21 |
-
max_train_epochs = 10
|
22 |
-
train_batch_size = 4
|
23 |
-
seed = 42
|
24 |
-
max_token_length = 225
|
25 |
-
xformers = false
|
26 |
-
sdpa = true
|
27 |
-
min_snr_gamma = 8.0
|
28 |
-
lowram = false
|
29 |
-
no_half_vae = true
|
30 |
-
gradient_checkpointing = true
|
31 |
-
gradient_accumulation_steps = 1
|
32 |
-
max_data_loader_n_workers = 8
|
33 |
-
persistent_data_loader_workers = true
|
34 |
-
mixed_precision = "fp16"
|
35 |
-
full_bf16 = false
|
36 |
-
cache_latents = true
|
37 |
-
cache_latents_to_disk = true
|
38 |
-
cache_text_encoder_outputs = false
|
39 |
-
min_timestep = 0
|
40 |
-
max_timestep = 1000
|
41 |
-
prior_loss_weight = 1.0
|
42 |
-
|
43 |
-
[saving_arguments]
|
44 |
-
save_precision = "fp16"
|
45 |
-
save_model_as = "safetensors"
|
46 |
-
save_every_n_epochs = 1
|
47 |
-
save_last_n_epochs = 10
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|