AlekseyCalvin committed (verified)
Commit: de51e59 · 1 Parent(s): ba1cf8d

Update transformer/config.json

Files changed (1)
  1. transformer/config.json +9 -3
transformer/config.json CHANGED
@@ -1,13 +1,19 @@
 {
   "_class_name": "FluxTransformer2DModel",
-  "_diffusers_version": "0.30.0.dev0",
+  "_diffusers_version": "0.35.0.dev0",
   "attention_head_dim": 128,
-  "guidance_embeds": false,
+  "axes_dims_rope": [
+    16,
+    56,
+    56
+  ],
+  "guidance_embeds": true,
   "in_channels": 64,
   "joint_attention_dim": 4096,
   "num_attention_heads": 24,
   "num_layers": 19,
   "num_single_layers": 38,
+  "out_channels": null,
   "patch_size": 1,
   "pooled_projection_dim": 768
- }
+ }
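
For reference, a minimal sketch of how the updated transformer config is read by the diffusers library; the repository id used here is a placeholder, not this model's actual repo, and the sketch assumes diffusers >= 0.35 is installed.

# Minimal sketch; assumes diffusers >= 0.35 is installed.
# "user/flux-repo" is a placeholder repository id, not the actual repo.
from diffusers import FluxTransformer2DModel

config = FluxTransformer2DModel.load_config(
    "user/flux-repo",        # placeholder repo id (assumption)
    subfolder="transformer",
)

# Keys added in this commit should now be present in the loaded config.
assert config["guidance_embeds"] is True
assert list(config["axes_dims_rope"]) == [16, 56, 56]
assert config["out_channels"] is None

# Instantiate a randomly initialized transformer from the config alone;
# trained weights would instead be loaded with
# FluxTransformer2DModel.from_pretrained(..., subfolder="transformer").
model = FluxTransformer2DModel.from_config(config)

With "guidance_embeds" switched to true, the instantiated transformer expects a guidance-scale input at inference time (as in guidance-distilled FLUX checkpoints); the other added keys ("axes_dims_rope", "out_channels") match the library defaults.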