sukaka committed on
Commit
f1a0847
·
1 Parent(s): 2463c45

Upload watercolor_pencils_lora_v2/watercolor_pencils.toml with huggingface_hub

Browse files
watercolor_pencils_lora_v2/watercolor_pencils.toml ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# LoRA training configuration for kohya-ss sd-scripts ("watercolor_pencils" style).
# NOTE(review): reconstructed from a diff view — leading "+" markers and gutter
# line numbers removed; all key/value pairs preserved verbatim.

# Base model and dataset
pretrained_model_name_or_path = "sd-models/animefull-final-pruned.safetensors"
train_data_dir = "/kaggle/input/lora-train-picture/watercolor_pencils"
shuffle_caption = true
caption_extension = ".txt"
keep_tokens = 1  # first token stays in place when captions are shuffled
# NOTE(review): 916 is not a multiple of 64 — confirm intended bucket resolution
resolution = "512,916"
enable_bucket = true

# Output and checkpointing
output_dir = "/kaggle/working/output"
output_name = "watercolor_pencils"
save_precision = "fp16"
save_every_n_epochs = 1

# Training schedule
train_batch_size = 2
max_token_length = 225
xformers = true
max_train_epochs = 15
seed = 1337
# NOTE(review): effective batch = 2 * 64 = 128 — confirm this is intentional
gradient_accumulation_steps = 64
mixed_precision = "fp16"
clip_skip = 2

# Logging (Weights & Biases)
logging_dir = "/kaggle/working/output/logs"
log_with = "wandb"
log_prefix = "watercolor_pencils"
log_tracker_name = "watercolor_pencils"
lowram = true

# Sampling during training
sample_every_n_epochs = 1
sample_prompts = "/kaggle/input/lora-train-picture/watercolor_pencils/tag.txt"
sample_sampler = "euler_a"

# Optimizer and learning rates
optimizer_type = "Lion"
learning_rate = 7e-5
# Args are opaque strings parsed by the trainer, not by TOML;
# ".95,.98" is legal there even though bare .95 would not be valid TOML.
optimizer_args = ["weight_decay=0.01", "betas=.95,.98"]
lr_scheduler = "cosine_with_restarts"
min_snr_gamma = 5.0
unet_lr = 7e-5
text_encoder_lr = 8e-6

# LoRA network
network_module = "networks.lora"
network_dim = 128
network_alpha = 64.0
training_comment = "watercolor_pencils_sukaka"