{
  "architectures": [
    "ClapModel"
  ],
  "audio_config": {
    "_attn_implementation_autoset": true,
    "aff_block_r": 4,
    "attention_probs_dropout_prob": 0.0,
    "depths": [
      2,
      2,
      6,
      2
    ],
    "drop_path_rate": 0.0,
    "enable_fusion": true,
    "enable_patch_fusion": true,
    "enable_patch_layer_norm": true,
    "flatten_patch_embeds": true,
    "fusion_num_hidden_layers": 2,
    "fusion_type": null,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 768,
    "initializer_factor": 1.0,
    "layer_norm_eps": 1e-05,
    "mlp_ratio": 4.0,
    "model_type": "clap_audio_model",
    "num_attention_heads": [
      4,
      8,
      16,
      32
    ],
    "num_classes": 527,
    "num_hidden_layers": 4,
    "num_mel_bins": 64,
    "patch_embed_input_channels": 1,
    "patch_embeds_hidden_size": 96,
    "patch_size": 4,
    "patch_stride": [
      4,
      4
    ],
    "projection_dim": 512,
    "projection_hidden_act": "relu",
    "projection_hidden_size": 768,
    "qkv_bias": true,
    "spec_size": 256,
    "torch_dtype": "float32",
    "window_size": 8
  },
  "hidden_size": 768,
  "initializer_factor": 1.0,
  "logit_scale_init_value": 14.285714285714285,
  "model_type": "clap",
  "num_hidden_layers": 16,
  "projection_dim": 512,
  "projection_hidden_act": "relu",
  "text_config": {
    "_attn_implementation_autoset": true,
    "attention_probs_dropout_prob": 0.1,
    "classifier_dropout": null,
    "fusion_hidden_size": 768,
    "fusion_num_hidden_layers": 2,
    "hidden_act": "gelu",
    "hidden_dropout_prob": 0.1,
    "hidden_size": 768,
    "initializer_factor": 1.0,
    "initializer_range": 0.02,
    "intermediate_size": 3072,
    "layer_norm_eps": 1e-12,
    "max_position_embeddings": 514,
    "model_type": "clap_text_model",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "position_embedding_type": "absolute",
    "projection_dim": 512,
    "projection_hidden_act": "relu",
    "projection_hidden_size": 768,
    "torch_dtype": "float32",
    "type_vocab_size": 1,
    "use_cache": true,
    "vocab_size": 50265
  },
  "torch_dtype": "float32",
  "transformers_version": "4.50.0"
}