Maxivi committed
Commit 38b35d1 · verified · 1 Parent(s): c3eee7b

Upload 17 files
AishaKhan-step00000200.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:079800dbc37f7cc74f41c70f69e98a5988d81c1b0c219c4094e04296ac64c5b0
+ size 866899288
AishaKhan-step00000400.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:49b19841b0d6e4674833618ad6cea2f0972f6e1dda4a3d6ac1b2bb7248193a57
+ size 866899288
AishaKhan-step00000600.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3bc883da926f9c0f5d858df52c89bb0ffb14cf8808f1bf0d210ce30dc3603cc0
+ size 866899288
AishaKhan-step00000800.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f016f70de25221eeaa10a446870191ecda28be8b7a5a3d50a56914840d13254f
+ size 866899288
AishaKhan-step00001000.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cbc835e6c254a7bb778d89a615daf99918ca30adf978c7e4190faec84fce71a6
+ size 866899288
AishaKhan-step00001200.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e807901da79933659e5731bed72a051d19d09fba8bf5f60077c942a5fa90190d
+ size 866899288
AishaKhan-step00001400.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d452e5f0964b032b24dd16b4ba180cf5ae6cf052ba007d89fb152b982b9a6bfa
+ size 866899288
AishaKhan-step00001600.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c181193dfab6dde0ae942268c3a393231efbedc656506bce02c060ab27779409
+ size 866899288
AishaKhan-step00001800.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91da386d256dc2080c622ad905078b576e0e3290286c4561bfd712d24b22892b
+ size 866899288
AishaKhan-step00002000.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19e57f5ba0bd5a9be589041a185f8443daaf23050cbc535eec5a786c51edd449
+ size 866899288
AishaKhan-step00002200.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:65f4d96575281c5ffe9d2d23751d6f0075f12f028f7cbf6dc37801c46bf7c940
+ size 866899288
AishaKhan-step00002400.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e61e3761d53cd301e383fe161d11540b952ca618629fb274f5420263e15cc0d6
+ size 866899288
AishaKhan-step00002600.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f20f5fa8ba0632942e93c604404ad2f1e6f8ca429df48731ccdf7fd258c2a21f
+ size 866899296
AishaKhan-step00002800.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c9bf39e5313d1a25c74fbc487a8539601665c9e5cb43a300d6d37243710008eb
+ size 866899296
AishaKhan-step00003000.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d116cd3b7c0910b0fc9668b84bd72b782ef61ddffd52c5098a25221a03e42b3
+ size 866899296
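
The .safetensors entries above are Git LFS pointers: the repository itself stores only each checkpoint's SHA-256 oid and byte size, while the ~866 MB weights live in LFS storage. As an illustration only (file paths are placeholders, not part of this commit), a minimal Python sketch that checks a downloaded checkpoint against its pointer:

```python
# Minimal sketch: verify a downloaded LFS object against its 3-line pointer file.
# Both paths are assumptions; point them at wherever the files actually live.
import hashlib
import os

pointer_path = "AishaKhan-step00003000.safetensors"          # the LFS pointer text
blob_path = "downloads/AishaKhan-step00003000.safetensors"   # the real ~866 MB file

# Parse "oid sha256:<hex>" and "size <bytes>" from the pointer.
fields = dict(line.split(" ", 1) for line in open(pointer_path, encoding="utf-8").read().splitlines())
expected_oid = fields["oid"].split(":", 1)[1]
expected_size = int(fields["size"])

# Hash the blob in 1 MiB chunks to avoid loading it all into memory.
h = hashlib.sha256()
with open(blob_path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(blob_path) == expected_size, "size mismatch"
assert h.hexdigest() == expected_oid, "sha256 mismatch"
print("checkpoint matches its LFS pointer")
```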
AishaKhan_20240911-054035.json ADDED
@@ -0,0 +1,185 @@
+ {
+ "LoRA_type": "Flux1",
+ "LyCORIS_preset": "full",
+ "adaptive_noise_scale": 0,
+ "additional_parameters": "--scale_weight_norms 1",
+ "ae": "/home/Ubuntu/Downloads/ae.safetensors",
+ "apply_t5_attn_mask": true,
+ "async_upload": false,
+ "block_alphas": "",
+ "block_dims": "",
+ "block_lr_zero_threshold": "",
+ "bucket_no_upscale": false,
+ "bucket_reso_steps": 32,
+ "bypass_mode": false,
+ "cache_latents": true,
+ "cache_latents_to_disk": true,
+ "caption_dropout_every_n_epochs": 0,
+ "caption_dropout_rate": 0,
+ "caption_extension": ".txt",
+ "clip_l": "/home/Ubuntu/Downloads/clip_l.safetensors",
+ "clip_skip": 0,
+ "color_aug": false,
+ "constrain": 0,
+ "conv_alpha": 1,
+ "conv_block_alphas": "",
+ "conv_block_dims": "",
+ "conv_dim": 1,
+ "cpu_offload_checkpointing": false,
+ "dataset_config": "/home/Ubuntu/Documents/dataset.toml",
+ "debiased_estimation_loss": false,
+ "decompose_both": false,
+ "dim_from_weights": false,
+ "discrete_flow_shift": 0,
+ "dora_wd": false,
+ "down_lr_weight": "",
+ "dynamo_backend": "no",
+ "dynamo_mode": "default",
+ "dynamo_use_dynamic": false,
+ "dynamo_use_fullgraph": false,
+ "enable_bucket": true,
+ "epoch": 10,
+ "extra_accelerate_launch_args": "",
+ "factor": -1,
+ "flip_aug": false,
+ "flux1_cache_text_encoder_outputs": false,
+ "flux1_cache_text_encoder_outputs_to_disk": false,
+ "flux1_checkbox": true,
+ "fp8_base": true,
+ "fp8_base_unet": true,
+ "full_bf16": true,
+ "full_fp16": false,
+ "gpu_ids": "",
+ "gradient_accumulation_steps": 1,
+ "gradient_checkpointing": true,
+ "guidance_scale": 1,
+ "highvram": true,
+ "huber_c": 0.1,
+ "huber_schedule": "snr",
+ "huggingface_path_in_repo": "",
+ "huggingface_repo_id": "",
+ "huggingface_repo_type": "",
+ "huggingface_repo_visibility": "",
+ "huggingface_token": "",
+ "ip_noise_gamma": 0,
+ "ip_noise_gamma_random_strength": false,
+ "keep_tokens": 0,
+ "learning_rate": 0.0001,
+ "log_config": true,
+ "log_tracker_config": "",
+ "log_tracker_name": "Adamw44ccc",
+ "log_with": "wandb",
+ "logging_dir": "/home/Ubuntu/Documents/logs",
+ "loraplus_lr_ratio": 0,
+ "loraplus_text_encoder_lr_ratio": 0,
+ "loraplus_unet_lr_ratio": 0,
+ "loss_type": "l2",
+ "lowvram": false,
+ "lr_scheduler": "constant",
+ "lr_scheduler_args": "",
+ "lr_scheduler_num_cycles": 1,
+ "lr_scheduler_power": 1,
+ "lr_scheduler_type": "",
+ "lr_warmup": 0,
+ "main_process_port": 0,
+ "masked_loss": false,
+ "max_bucket_reso": 2048,
+ "max_data_loader_n_workers": 0,
+ "max_grad_norm": 0,
+ "max_resolution": "512,512",
+ "max_timestep": 1000,
+ "max_token_length": 75,
+ "max_train_epochs": 0,
+ "max_train_steps": 3000,
+ "mem_eff_attn": false,
+ "mem_eff_save": true,
+ "metadata_author": "",
+ "metadata_description": "",
+ "metadata_license": "",
+ "metadata_tags": "",
+ "metadata_title": "",
+ "mid_lr_weight": "",
+ "min_bucket_reso": 256,
+ "min_snr_gamma": 10,
+ "min_timestep": 0,
+ "mixed_precision": "bf16",
+ "model_list": "custom",
+ "model_prediction_type": "raw",
+ "module_dropout": 0,
+ "multi_gpu": false,
+ "multires_noise_discount": 0.3,
+ "multires_noise_iterations": 0,
+ "network_alpha": 64,
+ "network_dim": 64,
+ "network_dropout": 0,
+ "network_weights": "",
+ "noise_offset": 0.0357,
+ "noise_offset_random_strength": false,
+ "noise_offset_type": "Original",
+ "num_cpu_threads_per_process": 2,
+ "num_machines": 1,
+ "num_processes": 1,
+ "optimizer": "Adafactor",
+ "optimizer_args": "scale_parameter=False relative_step=False warmup_init=False weight_decay=0.01",
+ "output_dir": "/home/Ubuntu/Documents/model47sssssssss753",
+ "output_name": "AishaKhan",
+ "persistent_data_loader_workers": false,
+ "pretrained_model_name_or_path": "/home/Ubuntu/Downloads/flux1-dev.safetensors",
+ "prior_loss_weight": 1,
+ "random_crop": false,
+ "rank_dropout": 0,
+ "rank_dropout_scale": false,
+ "reg_data_dir": "",
+ "rescaled": false,
+ "resume": "",
+ "resume_from_huggingface": "",
+ "sample_every_n_epochs": 0,
+ "sample_every_n_steps": 200,
+ "sample_prompts": "masterpiece, best quality, ohwxak woman, in white shirt, portrait, looking at viewer, simple background --w 1024 --h 1024 --s 20 --l 3.5\nAnime illustration portrait of ohwxak woman, forest with lake in the background, high quality, beautiful face, action pose, clear focus, detailed and fantasy setting --w 1024 --h 1024 --s 20 --l 3.5\nComic illustration portrait of ohwxak woman, forest with lake in the background, high quality, beautiful face, action pose, clear focus, detailed and fantasy setting --w 1024 --h 1024 --s 20 --l 3.5\nohwxak woman,High res closeup portrait photo of an actress dressed as Harley Quinn, with full makeup, f /2.8, Canon, 85mm,cinematic, high quality, skin texture, looking at the camera --w 1024 --h 1024 --s 20 --l 3.5",
+ "sample_sampler": "euler_a",
+ "save_every_n_epochs": 1,
+ "save_every_n_steps": 200,
+ "save_last_n_steps": 0,
+ "save_last_n_steps_state": 0,
+ "save_model_as": "safetensors",
+ "save_precision": "bf16",
+ "save_state": false,
+ "save_state_on_train_end": false,
+ "save_state_to_huggingface": false,
+ "scale_v_pred_loss_like_noise_pred": false,
+ "scale_weight_norms": 0,
+ "sdxl": false,
+ "sdxl_cache_text_encoder_outputs": false,
+ "sdxl_no_half_vae": false,
+ "seed": 42,
+ "shuffle_caption": false,
+ "split_mode": false,
+ "split_qkv": false,
+ "stop_text_encoder_training": 0,
+ "t5xxl": "/home/Ubuntu/Downloads/t5xxl_fp16.safetensors",
+ "t5xxl_max_token_length": 512,
+ "text_encoder_lr": 0.0001,
+ "timestep_sampling": "flux_shift",
+ "train_batch_size": 1,
+ "train_blocks": "all",
+ "train_data_dir": "",
+ "train_norm": false,
+ "train_on_input": true,
+ "train_t5xxl": true,
+ "training_comment": "",
+ "unet_lr": 0.0001,
+ "unit": 1,
+ "up_lr_weight": "",
+ "use_cp": false,
+ "use_scalar": false,
+ "use_tucker": false,
+ "v2": false,
+ "v_parameterization": false,
+ "v_pred_like_loss": 0,
+ "vae": "/home/Ubuntu/Downloads/clip_l.safetensors",
+ "vae_batch_size": 0,
+ "wandb_api_key": "53e2b86d002b17be4f5eef2174e4bd05916330bf",
+ "wandb_run_name": "Adamw44ccc",
+ "weighted_captions": false,
+ "xformers": "sdpa"
+ }
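
AishaKhan_20240911-054035.json is the kohya_ss GUI export for this Flux.1-dev LoRA run (rank 64, Adafactor, constant LR 1e-4, 3000 steps, a checkpoint every 200 steps, which matches the step-numbered files above). As a quick illustration only, assuming the JSON has been downloaded locally, a sketch that loads it and prints the settings that define the run:

```python
# Minimal sketch: summarize the exported kohya_ss GUI config for this run.
# The file name matches the commit; the local path is an assumption.
import json

with open("AishaKhan_20240911-054035.json", encoding="utf-8") as f:
    cfg = json.load(f)

# Keys taken directly from the config above.
for key in (
    "LoRA_type", "pretrained_model_name_or_path", "network_dim", "network_alpha",
    "optimizer", "learning_rate", "lr_scheduler", "max_train_steps",
    "train_batch_size", "max_resolution", "mixed_precision", "save_every_n_steps",
):
    print(f"{key:32s} {cfg[key]}")
```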
config_lora-20240911-054035.toml ADDED
@@ -0,0 +1,69 @@
+ ae = "/home/Ubuntu/Downloads/ae.safetensors"
+ apply_t5_attn_mask = true
+ bucket_reso_steps = 32
+ cache_latents = true
+ cache_latents_to_disk = true
+ caption_extension = ".txt"
+ clip_l = "/home/Ubuntu/Downloads/clip_l.safetensors"
+ dataset_config = "/home/Ubuntu/Documents/dataset.toml"
+ dynamo_backend = "no"
+ enable_bucket = true
+ epoch = 10
+ fp8_base = true
+ fp8_base_unet = true
+ full_bf16 = true
+ gradient_accumulation_steps = 1
+ gradient_checkpointing = true
+ guidance_scale = 1.0
+ highvram = true
+ huber_c = 0.1
+ huber_schedule = "snr"
+ log_config = true
+ log_tracker_name = "Adamw44ccc"
+ log_with = "wandb"
+ logging_dir = "/home/Ubuntu/Documents/logs"
+ loss_type = "l2"
+ lr_scheduler = "constant"
+ lr_scheduler_args = []
+ lr_scheduler_num_cycles = 1
+ lr_scheduler_power = 1
+ max_bucket_reso = 2048
+ max_data_loader_n_workers = 0
+ max_timestep = 1000
+ max_train_steps = 3000
+ mem_eff_save = true
+ min_bucket_reso = 256
+ min_snr_gamma = 10
+ mixed_precision = "bf16"
+ model_prediction_type = "raw"
+ network_alpha = 64
+ network_args = [ "train_t5xxl=True",]
+ network_dim = 64
+ network_module = "networks.lora_flux"
+ noise_offset = 0.0357
+ noise_offset_type = "Original"
+ optimizer_args = [ "scale_parameter=False", "relative_step=False", "warmup_init=False", "weight_decay=0.01",]
+ optimizer_type = "Adafactor"
+ output_dir = "/home/Ubuntu/Documents/model47sssssssss753"
+ output_name = "AishaKhan"
+ pretrained_model_name_or_path = "/home/Ubuntu/Downloads/flux1-dev.safetensors"
+ prior_loss_weight = 1
+ resolution = "512,512"
+ sample_every_n_steps = 200
+ sample_prompts = "/home/Ubuntu/Documents/model47sssssssss753/sample/prompt.txt"
+ sample_sampler = "euler_a"
+ save_every_n_epochs = 1
+ save_every_n_steps = 200
+ save_model_as = "safetensors"
+ save_precision = "bf16"
+ sdpa = true
+ seed = 42
+ t5xxl = "/home/Ubuntu/Downloads/t5xxl_fp16.safetensors"
+ t5xxl_max_token_length = 512
+ text_encoder_lr = 0.0001
+ timestep_sampling = "flux_shift"
+ train_batch_size = 1
+ unet_lr = 0.0001
+ vae = "/home/Ubuntu/Downloads/clip_l.safetensors"
+ wandb_api_key = "53e2b86d002b17be4f5eef2174e4bd05916330bf"
+ wandb_run_name = "Adamw44ccc"
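
config_lora-20240911-054035.toml is the flattened config the GUI hands to the trainer; it mirrors the JSON above, with sample_prompts resolved to a prompt.txt path and optimizer_args split into a list. A minimal sketch for inspecting it (Python 3.11+ for tomllib; the launch command in the final comment is an assumption about the usual sd-scripts entry point, not something recorded in this commit):

```python
# Minimal sketch: parse the TOML that drives the training run and print the
# fields that distinguish it. Assumes the file is in the current directory.
import tomllib

with open("config_lora-20240911-054035.toml", "rb") as f:
    cfg = tomllib.load(f)

print("base model :", cfg["pretrained_model_name_or_path"])
print("network    :", cfg["network_module"], f'dim={cfg["network_dim"]}', f'alpha={cfg["network_alpha"]}')
print("optimizer  :", cfg["optimizer_type"], cfg["optimizer_args"])
print("steps      :", cfg["max_train_steps"], "| save every", cfg["save_every_n_steps"], "steps")

# Presumed invocation (an assumption, not part of this commit): the kohya_ss GUI
# normally launches something like
#   accelerate launch sd-scripts/flux_train_network.py --config_file config_lora-20240911-054035.toml
```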