# LoRA training configuration (kohya-style sd-scripts layout — TODO confirm consumer).
# Reformatted to valid TOML: the original had all pairs on one line, which the
# TOML spec rejects (a newline is required after every key/value pair).

[network_arguments]
unet_lr = 6e-5
text_encoder_lr = 1e-6
network_dim = 16
network_alpha = 8
network_module = "networks.lora"
network_train_unet_only = false

[optimizer_arguments]
learning_rate = 6e-5
lr_scheduler = "cosine"
# Custom scheduler/optimizer classes resolved by dotted module path.
lr_scheduler_type = "LoraEasyCustomOptimizer.RexAnnealingWarmRestarts.RexAnnealingWarmRestarts"
# Scheduler options are passed as "key=value" strings, per sd-scripts convention.
lr_scheduler_args = [
    "min_lr=1e-9",
    "gamma=0.9",
    "d=0.9",
    "first_cycle_max_steps=1240",
]
optimizer_type = "LoraEasyCustomOptimizer.came.CAME"
optimizer_args = ["weight_decay=0.04"]
loss_type = "l2"
max_grad_norm = 1.0

[training_arguments]
lowram = true
pretrained_model_name_or_path = "/content/Illustrious-XL-v0.1.safetensors"
vae = "/content/sdxl_vae.safetensors"
max_train_epochs = 10
train_batch_size = 8
seed = 42
max_token_length = 225
# Attention backend: SDPA enabled, xformers disabled — only one should be on.
xformers = false
sdpa = true
min_snr_gamma = 8.0
ip_noise_gamma = 0.05
no_half_vae = true
gradient_checkpointing = true
gradient_accumulation_steps = 1
max_data_loader_n_workers = 1
persistent_data_loader_workers = true
# Precision: bf16 mixed/full; fp16 explicitly off.
mixed_precision = "bf16"
full_bf16 = true
full_fp16 = false
cache_latents = true
cache_latents_to_disk = true
cache_text_encoder_outputs = false
min_timestep = 0
max_timestep = 1000
prior_loss_weight = 1.0
multires_noise_iterations = 6
multires_noise_discount = 0.3

[saving_arguments]
save_precision = "fp16"
save_model_as = "safetensors"
save_every_n_epochs = 1
save_last_n_epochs = 4
output_name = "fubuki_illus"
output_dir = "/content/drive/MyDrive/Loras/fubuki_illus/output"
log_prefix = "fubuki_illus"
logging_dir = "/content/drive/MyDrive/Loras/_logs"
# SECURITY NOTE(review): plaintext W&B API key committed in config. Rotate this
# key and supply it via the WANDB_API_KEY environment variable instead of the file.
wandb_api_key = "ef4d900d0c1f0dcfc295578a23bda6fc5805b7e2"
log_with = "wandb"