# Provenance (from the model-repository page this file was copied from):
#   uploaded by HyperX-Sentience as "config.json"
#   commit ba36602 (verified), file size 1.14 kB
# LoRA network configuration (loaded by the "networks.lora" module).
[network_arguments]
unet_lr = 0.75
text_encoder_lr = 0.75
network_dim = 32
network_alpha = 32
network_module = "networks.lora"
# Extra options forwarded to the network module as "key=value" strings.
network_args = ["conv_dim=32", "conv_alpha=16"]
network_train_unet_only = false
# Optimizer: Prodigy with a cosine-with-restarts LR schedule.
# NOTE(review): Prodigy is conventionally run with learning_rate = 1.0;
# confirm that 0.75 here (and the matching unet/text-encoder LRs) is intentional.
[optimizer_arguments]
learning_rate = 0.75
lr_scheduler = "cosine_with_restarts"
lr_scheduler_num_cycles = 5
lr_warmup_steps = 34
optimizer_type = "Prodigy"
# Forwarded to the optimizer constructor as "key=value" strings;
# "betas=[0.9,0.99]" is deliberately a string, parsed by the consumer.
optimizer_args = ["weight_decay=0.1", "betas=[0.9,0.99]"]
# Core training run settings.
[training_arguments]
# Base model and VAE checkpoints (paths resolved by the trainer).
pretrained_model_name_or_path = "sd_xl_base_1.0.safetensors"
vae = "sdxl_vae.safetensors"

# Run length and batching.
max_train_epochs = 10
train_batch_size = 4
gradient_accumulation_steps = 1
seed = 42

# Tokenizer / attention backend (sdpa enabled, xformers disabled).
max_token_length = 225
xformers = false
sdpa = true

# Loss shaping and timestep sampling range.
min_snr_gamma = 8.0
min_timestep = 0
max_timestep = 1000
prior_loss_weight = 1.0

# Precision and memory trade-offs.
# NOTE(review): no_half_vae = true keeps the VAE in full precision under
# mixed_precision = "fp16" — presumably to avoid fp16 VAE instability; confirm.
mixed_precision = "fp16"
full_bf16 = false
no_half_vae = true
lowram = false
gradient_checkpointing = true

# Latent / dataloader caching.
cache_latents = true
cache_latents_to_disk = true
cache_text_encoder_outputs = false
max_data_loader_n_workers = 8
persistent_data_loader_workers = true
# Checkpoint output settings: one safetensors file per epoch,
# keeping only the last 10 epoch checkpoints.
[saving_arguments]
save_precision = "fp16"
save_model_as = "safetensors"
save_every_n_epochs = 1
save_last_n_epochs = 10