# Illustrious2.0-lora-Vpred-conversion-experiments/Illustrious-v2-512res-b256-lr3e5-altereddebiased-edm2-laplace-locondora-ztsnr-vpredconv/Illustrious-v2-512res-b256-lr4e5-altereddebiased-edm2-laplace-locondora-ztsnr-vpredconv_3.toml
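# Training config, apparently for the derrian_distro "LoRA Easy Training Scripts" front-end to sd-scripts
# (suggested by the output_dir paths and the LoraEasyCustomOptimizer reference below): a LoCon/DoRA
# network trained on Illustrious-XL v2.0 at 512px with v-prediction, zero-terminal SNR, debiased
# estimation loss and EDM2-style learned loss weighting.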
[[subsets]]
caption_extension = ".txt"
caption_tag_dropout_rate = 0.1
image_dir = "D:/datasets/reg2"
name = "reg2"
num_repeats = 1
random_crop = true
random_crop_padding_percent = 0.06
shuffle_caption = true
[[subsets]]
caption_extension = ".txt"
caption_tag_dropout_rate = 0.1
image_dir = "Y:/stable-diffusion/datasets/Artoria tiny"
keep_tokens = 1
name = "ishiri small"
num_repeats = 1
random_crop = true
random_crop_padding_percent = 0.06
shuffle_caption = true
[[subsets]]
caption_extension = ".txt"
caption_tag_dropout_rate = 0.1
image_dir = "D:/datasets/reg"
name = "reg"
num_repeats = 1
random_crop = true
random_crop_padding_percent = 0.06
shuffle_caption = true
[[subsets]]
caption_extension = ".txt"
caption_tag_dropout_rate = 0.1
image_dir = "D:/datasets/mix"
name = "mix"
num_repeats = 1
random_crop = true
random_crop_padding_percent = 0.06
shuffle_caption = true
[[subsets]]
caption_extension = ".txt"
caption_tag_dropout_rate = 0.1
image_dir = "D:/datasets/vpred conversion"
name = "Vpred set"
num_repeats = 1
random_crop = true
random_crop_padding_percent = 0.06
shuffle_caption = true
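# The five subsets share the same augmentation settings: tag-style captions from .txt files, 10% per-tag
# caption dropout, caption shuffling, and random cropping with ~6% padding (random_crop_padding_percent
# appears to be a LoRA Easy extension rather than a base sd-scripts option). Only the "ishiri small"
# subset pins its first caption tag in place via keep_tokens = 1.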
[train_mode]
train_mode = "lora"
[general_args.args]
persistent_data_loader_workers = true
pretrained_model_name_or_path = "Y:/stable-diffusion/models/Stable-diffusion/Illustrious-XL-v20.safetensors"
debiased_estimation_loss = true
mixed_precision = "bf16"
gradient_checkpointing = true
seed = 8602366
max_data_loader_n_workers = 4
max_token_length = 225
prior_loss_weight = 1.0
xformers = true
max_train_epochs = 50
sdxl = true
v_parameterization = true
gradient_accumulation_steps = 30
training_comment = "debiased estimation with sqrt(snr_t) / (snr_t+1) edm2 loss and laplace timestep sampling based on edm2"
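# The weighting named in training_comment, sqrt(SNR_t)/(SNR_t+1), equals the usual debiased-estimation
# factor 1/sqrt(SNR_t) multiplied by the v-prediction loss scaling SNR_t/(SNR_t+1); this appears to be
# the "altered" debiased estimation referenced in the output name. Laplace timestep sampling is enabled
# via extra_args further down.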
[general_args.dataset_args]
resolution = 512
batch_size = 12
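# With batch_size = 12 and gradient_accumulation_steps = 30, each optimizer step sees
# 12 x 30 = 360 samples (assuming a single training process).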
[network_args.args]
network_dim = 64
network_alpha = 128.0
min_timestep = 0
max_timestep = 1000
network_train_unet_only = true
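# Rank-64 network with alpha = 128 (an alpha/dim ratio of 2, effectively doubling the update scale);
# only the U-Net is trained, the text encoders stay frozen.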
[optimizer_args.args]
optimizer_type = "AdamW8bitAO"
lr_scheduler = "constant_with_warmup"
loss_type = "l2"
warmup_ratio = 0.02
max_grad_norm = 0.1
zero_terminal_snr = true
learning_rate = 4e-5
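# AdamW8bitAO presumably refers to the torchao 8-bit AdamW variant. warmup_ratio = 0.02 warms the
# learning rate up over the first 2% of steps before the constant schedule, and max_grad_norm = 0.1
# clips gradients fairly aggressively. zero_terminal_snr rescales the noise schedule so the final
# timestep carries no signal, the usual companion to v_parameterization (the "ztsnr" in the output name).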
[saving_args.args]
save_precision = "bf16"
save_model_as = "safetensors"
save_every_n_epochs = 1
save_toml = true
output_dir = "Y:/stable-diffusion/lora/derrian_distro/models/Illustrious-v2-512res-b256-lr3e5-altereddebiased-edm2-laplace-locondora-ztsnr-vpredconv"
save_toml_location = "Y:/stable-diffusion/lora/derrian_distro/models/Illustrious-v2-512res-b256-lr3e5-altereddebiased-edm2-laplace-locondora-ztsnr-vpredconv"
output_name = "Illustrious-v2-512res-b256-lr4e5-altereddebiased-edm2-laplace-locondora-ztsnr-vpredconv"
[logging_args.args]
log_prefix_mode = "disabled"
run_name_mode = "default"
log_with = "tensorboard"
logging_dir = "Y:/stable-diffusion/lora/derrian_distro/models/Illustrious-v2-512res-b256-lr3e5-altereddebiased-edm2-laplace-locondora-ztsnr-vpredconv"
[edm_loss_args.args]
edm2_loss_weighting_max_grad_norm = "0"
edm2_loss_weighting_initial_weights = ""
edm2_loss_weighting_num_channels = 448
edm2_loss_weighting = true
edm2_loss_weighting_generate_graph = true
edm2_loss_weighting_generate_graph_y_limit = 20
edm2_loss_weighting_generate_graph_every_x_steps = 5
edm2_loss_weighting_optimizer_lr = "5e-3"
edm2_loss_weighting_optimizer = "LoraEasyCustomOptimizer.fmarscrop.FMARSCropV2ExMachina"
edm2_loss_weighting_optimizer_args = "{'update_strategy':'cautious', 'gamma':0.0, 'betas':(0.99,0.9999,0.999), 'adaptive_clip':0}"
edm2_loss_weighting_lr_scheduler = true
edm2_loss_weighting_lr_scheduler_warmup_percent = 0.1
edm2_loss_weighting_lr_scheduler_constant_percent = 0.9
edm2_loss_weighting_generate_graph_output_dir = "Y:/stable-diffusion/lora/derrian_distro/models/Illustrious-v2-512res-b256-lr3e5-altereddebiased-edm2-laplace-locondora-ztsnr-vpredconv"
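# EDM2-style learned loss weighting, after Karras et al.'s "Analyzing and Improving the Training Dynamics
# of Diffusion Models": a learned weighting module (num_channels = 448) is trained alongside the LoRA
# with its own optimizer (the bundled FMARSCropV2ExMachina at lr 5e-3) and its own warmup/constant
# schedule, and a graph of the learned weighting is written every 5 steps. The exact semantics of these
# keys are specific to this trainer fork.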
[extra_args.args]
edm2_loss_weighting_laplace = "True"
[bucket_args.dataset_args]
enable_bucket = true
min_bucket_reso = 256
bucket_reso_steps = 128
max_bucket_reso = 768
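# Aspect-ratio bucketing around the 512px base resolution, with bucket sides from 256 to 768 in 128px steps.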
[network_args.args.network_args]
conv_dim = 64
conv_alpha = 128.0
algo = "locon"
dora_wd = true
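# LyCORIS LoCon with DoRA weight decomposition (algo = "locon" plus dora_wd = true), matching the
# "locondora" tag in the output name; conv_dim/conv_alpha apply the same rank settings to the conv layers.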
[optimizer_args.args.optimizer_args]
weight_decay = "0.042"
betas = "0.9,0.99"
bf16_stochastic_round = "True"
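# Per-optimizer extras: weight decay 0.042, betas (0.9, 0.99), and stochastic rounding for bf16 weight
# updates; the values are quoted as strings here and presumably parsed by the trainer, as elsewhere in
# this file.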