sukaka committed
Commit ea1ac90 · 1 Parent(s): e22d6f7

Upload test/Deemo.toml with huggingface_hub
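For reference, an upload commit like this one is typically produced by a call along the following lines (a minimal sketch, not the exact command used here; the repo id is a placeholder, and the token is assumed to come from `huggingface-cli login` or the HF_TOKEN environment variable):

from huggingface_hub import HfApi

api = HfApi()  # picks up the cached login token or HF_TOKEN
api.upload_file(
    path_or_fileobj="test/Deemo.toml",    # local file to upload
    path_in_repo="test/Deemo.toml",       # destination path inside the repo
    repo_id="sukaka/your-repo",           # placeholder; the actual repo id is not shown in the commit
    commit_message="Upload test/Deemo.toml with huggingface_hub",
)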

Files changed (1)
  test/Deemo.toml +41 -0
test/Deemo.toml ADDED
@@ -0,0 +1,41 @@
+ pretrained_model_name_or_path = "sd-models/animefull-final-pruned.safetensors"
+ train_data_dir = "/kaggle/input/lora-train-picture/Deemo2/Deemo"
+ shuffle_caption = true
+ caption_extension = ".txt"
+ keep_tokens = 5
+ resolution = "512,512"
+ cache_latents = true
+ enable_bucket = true
+ output_dir = "/kaggle/working/output"
+ output_name = "Deemo"
+ save_precision = "fp16"
+ save_every_n_epochs = 1
+ train_batch_size = 2
+ max_token_length = 225
+ xformers = true
+ max_train_epochs = 1
+ seed = 1337
+ gradient_accumulation_steps = 64
+ mixed_precision = "fp16"
+ clip_skip = 2
+ logging_dir = "/kaggle/working/logs"
+ log_with = "wandb"
+ log_prefix = "Deemo"
+ log_tracker_name = "Deemo"
+ multires_noise_iterations = 6
+ multires_noise_discount = 0.1
+ lowram = true
+ sample_every_n_epochs = 1
+ sample_prompts = "/kaggle/input/lora-train-picture/Deemo2/test-picture.txt"
+ sample_sampler = "euler_a"
+ optimizer_type = "Lion"
+ learning_rate = 8.067e-5
+ optimizer_args = [ "betas=.95,.98",]
+ lr_scheduler = "cosine_with_restarts"
+ min_snr_gamma = 5.0
+ unet_lr = 8.067e-5
+ text_encoder_lr = 2.689e-5
+ network_module = "networks.lora"
+ network_dim = 128
+ network_alpha = 64.0
+ training_comment = "Deemo-sukaka"
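The file is a flat table of LoRA training options in the style consumed by kohya-ss sd-scripts (typically passed to train_network.py via a --config_file style flag, assuming the sd-scripts version in use supports TOML configs). A quick sanity check of the committed values can be done by loading the file in Python; this is a minimal sketch assuming the relative path from the repo root and Python 3.11+ for tomllib:

import tomllib  # stdlib in Python 3.11+; use the third-party `toml` package on older versions

with open("test/Deemo.toml", "rb") as f:
    cfg = tomllib.load(f)

# Effective batch size = per-step batch size x gradient accumulation steps
effective_batch = cfg["train_batch_size"] * cfg["gradient_accumulation_steps"]
print(f"effective batch size: {effective_batch}")  # 2 * 64 = 128
print(f"unet_lr={cfg['unet_lr']}, text_encoder_lr={cfg['text_encoder_lr']}")
print(f"network_dim={cfg['network_dim']}, network_alpha={cfg['network_alpha']}")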