ljp committed on
Commit
ab1f001
·
1 Parent(s): 53c1495

Upload 2 files

Browse files
Files changed (1) hide show
  1. config.json +20 -0
config.json ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "_class_name": "FluxControlNetModel",
  "_diffusers_version": "0.30.2",
  "_name_or_path": "/data/oss_bucket_0/linjinpeng.ljp/exp_flux/r768_bs96_adamw_lr5e-6_bf16_cfg3.5_sin0_dou6_s11/checkpoint-50000",
  "attention_head_dim": 128,
  "axes_dims_rope": [
    16,
    56,
    56
  ],
  "extra_condition_channels": 4,
  "guidance_embeds": true,
  "in_channels": 64,
  "joint_attention_dim": 4096,
  "num_attention_heads": 24,
  "num_layers": 6,
  "num_single_layers": 0,
  "patch_size": 1,
  "pooled_projection_dim": 768
}