{
  "add_teeth": false,
  "encoder_feat_dim": 1024,
  "encoder_freeze": false,
  "encoder_grad_ckpt": true,
  "encoder_model_name": "dinov2_vitl14_reg",
  "encoder_type": "dinov2_fusion",
  "expr_param_dim": 10,
  "fix_opacity": false,
  "fix_rotation": false,
  "flame_subdivide_num": 1,
  "flame_type": "flame",
  "gs_clip_scaling": 0.01,
  "gs_mlp_network_config": {
    "activation": "silu",
    "n_hidden_layers": 2,
    "n_neurons": 512
  },
  "gs_query_dim": 1024,
  "gs_sh": 3,
  "gs_use_rgb": true,
  "gs_xyz_offset_max_step": 0.2,
  "has_disc": false,
  "human_model_path": "./pretrained_models/human_model_files",
  "latent_query_points_type": "e2e_flame",
  "oral_mesh_flag": false,
  "pcl_dim": 1024,
  "scale_sphere": false,
  "shape_param_dim": 10,
  "teeth_bs_flag": false,
  "tf_grad_ckpt": true,
  "transformer_dim": 1024,
  "transformer_heads": 16,
  "transformer_layers": 10,
  "transformer_type": "sd3_cond",
  "use_32d": false,
  "use_conf_map": false,
  "use_dual_attention": false,
  "use_gag": false,
  "use_projection_enhancement": false,
  "use_sr": false,
  "use_sym_proj": false
}