{
  "patch_size": 1,
  "in_channels": 16,
  "num_layers": 4,
  "num_single_layers": 4,
  "attention_head_dim": 96,
  "num_attention_heads": 24,
  "joint_attention_dim": 4096,
  "pooled_projection_dim": null,
  "guidance_embeds": false,
  "axes_dims_rope": [
    0,
    48,
    48
  ],
  "rope_theta": 10000,
  "time_theta": 10000,
  "_class_name": "BriaTransformer2DModel",
  "_diffusers_version": "0.32.2",
  "_name_or_path": "briaai/BRIA-3.1",
  "num_mode": 6
}
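
For reference, a minimal Python sketch that reads a config like the one above and checks how its dimensions fit together. The filename, the config path, and the reading of `num_layers` / `num_single_layers` as dual-stream and single-stream block counts are assumptions for illustration, not taken from the source.

```python
# check_config.py -- hypothetical helper; a sketch, not part of the briaai/BRIA-3.1 repo.
import json

# Assumed path: point this at the transformer config shown above.
with open("config.json") as f:
    cfg = json.load(f)

# Hidden width of the transformer: heads * per-head dim = 24 * 96 = 2304.
inner_dim = cfg["num_attention_heads"] * cfg["attention_head_dim"]

# The rotary embedding splits each 96-dim attention head across three axes
# (0, 48, 48), so the axis dims must sum to attention_head_dim.
assert sum(cfg["axes_dims_rope"]) == cfg["attention_head_dim"]

print(f"class:     {cfg['_class_name']}")
print(f"inner dim: {inner_dim}")
# Assumed interpretation: num_layers = dual-stream blocks, num_single_layers = single-stream blocks.
print(f"blocks:    {cfg['num_layers']} dual-stream / {cfg['num_single_layers']} single-stream")
```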