{
  "_name_or_path": "whisper-turbo",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "apply_spec_augment": false,
  "architectures": ["WhisperForConditionalGeneration"],
  "attention_dropout": 0.0,
  "begin_suppress_tokens": [220, 18871, 18978],
  "bos_token_id": 18871,
  "classifier_proj_size": 256,
  "d_model": 1280,
  "decoder_attention_heads": 20,
  "decoder_ffn_dim": 5120,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 4,
  "decoder_start_token_id": 18872,
  "dropout": 0.0,
  "encoder_attention_heads": 20,
  "encoder_ffn_dim": 5120,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 32,
  "eos_token_id": 18871,
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 10,
  "mask_time_min_masks": 2,
  "mask_time_prob": 0.05,
  "max_source_positions": 1500,
  "max_target_positions": 448,
  "median_filter_width": 7,
  "model_type": "whisper",
  "num_hidden_layers": 32,
  "num_mel_bins": 128,
  "pad_token_id": 18870,
  "scale_embedding": false,
  "torch_dtype": "float16",
  "transformers_version": "4.46.0.dev0",
  "use_cache": true,
  "use_weighted_layer_sum": false,
  "vocab_size": 20480,
  "alignment_heads": [
    [1, 1], [1, 8], [1, 12], [1, 19],
    [2, 4], [2, 11], [2, 16],
    [3, 4], [3, 6], [3, 12]
  ],
  "lang_ids": [
    18873, 18874, 18875, 18876, 18877, 18878, 18879, 18880, 18881, 18882,
    18883, 18884, 18885, 18886, 18887, 18888, 18889, 18890, 18891, 18892,
    18893, 18894, 18895, 18896, 18897, 18898, 18899, 18900, 18901, 18902,
    18903, 18904, 18905, 18906, 18907, 18908, 18909, 18910, 18911, 18912,
    18913, 18914, 18915, 18916, 18917, 18918, 18919, 18920, 18921, 18922,
    18923, 18924, 18925, 18926, 18927, 18928, 18929, 18930, 18931, 18932,
    18933, 18934, 18935, 18936, 18937, 18938, 18939, 18940, 18941, 18942,
    18943, 18944, 18945, 18946, 18947, 18948, 18949, 18950, 18951, 18952,
    18953, 18954, 18955, 18956, 18957, 18958, 18959, 18960, 18961, 18962,
    18963, 18964, 18965, 18966, 18967, 18968, 18969, 18970, 18971, 18972
  ],
  "suppress_ids": [],
  "suppress_ids_begin": [220, 18871, 18978]
}
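
Below is a minimal sketch of how this config might be loaded with the Transformers library, assuming the rest of this repository is a standard Transformers checkpoint. The alignment_heads, lang_ids, suppress_ids and suppress_ids_begin fields are not stock WhisperConfig arguments and are expected to surface as extra attributes on the loaded config; the from_pretrained path in the final comment is a placeholder, not a real repo id.

from transformers import WhisperConfig, WhisperForConditionalGeneration

# Load the configuration above from a local copy of config.json.
config = WhisperConfig.from_json_file("config.json")

# Standard Whisper hyperparameters from this file.
print(config.d_model, config.encoder_layers, config.decoder_layers)  # 1280 32 4
print(config.num_mel_bins, config.vocab_size)                        # 128 20480

# Non-standard keys (alignment_heads, lang_ids, suppress_ids, suppress_ids_begin)
# should be carried along as extra attributes rather than dropped.
print(getattr(config, "lang_ids", [])[:3])  # [18873, 18874, 18875]

# Build the architecture from the config alone (randomly initialized weights).
model = WhisperForConditionalGeneration(config)

# To load the actual checkpoint weights, point from_pretrained at this
# repository; "path/to/this-repo" is a placeholder for the real repo id
# or a local directory containing the model files.
# model = WhisperForConditionalGeneration.from_pretrained("path/to/this-repo")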