{
  "_name_or_path": "openai/whisper-large-v2",
  "activation_dropout": 0.0,
  "activation_function": "gelu",
  "apply_spec_augment": false,
  "architectures": [
    "WhisperForConditionalGeneration"
  ],
  "attention_dropout": 0.0,
  "begin_suppress_tokens": [
    186,
    49639
  ],
  "bos_token_id": 49639,
  "classifier_proj_size": 256,
  "d_model": 1280,
  "decoder_attention_heads": 20,
  "decoder_ffn_dim": 5120,
  "decoder_layerdrop": 0.0,
  "decoder_layers": 32,
  "decoder_start_token_id": 49640,
  "dropout": 0.0,
  "encoder_attention_heads": 20,
  "encoder_ffn_dim": 5120,
  "encoder_layerdrop": 0.0,
  "encoder_layers": 32,
  "eos_token_id": 49639,
"forced_decoder_ids": [ |
|
[ |
|
1, |
|
49641 |
|
], |
|
[ |
|
49741 |
|
], |
|
[ |
|
49745 |
|
] |
|
], |
  "init_std": 0.02,
  "is_encoder_decoder": true,
  "mask_feature_length": 10,
  "mask_feature_min_masks": 0,
  "mask_feature_prob": 0.0,
  "mask_time_length": 10,
  "mask_time_min_masks": 2,
  "mask_time_prob": 0.05,
  "max_length": 448,
  "max_source_positions": 1500,
  "max_target_positions": 448,
  "median_filter_width": 7,
  "model_type": "whisper",
  "num_hidden_layers": 32,
  "num_mel_bins": 80,
  "pad_token_id": 49639,
  "scale_embedding": false,
  "suppress_tokens": [
    1,
    3,
    4,
    8,
    9,
    324,
    467,
    486,
    506,
    834,
    862,
    878,
    882,
    891,
    1305,
    1801,
    1929,
    2400,
    2566,
    3178,
    3185,
    3200,
    3461,
    3768,
    3883,
    4103,
    4580,
    6473,
    6533,
    7148,
    8901,
    10253,
    10747,
    11742,
    11835,
    12130,
    12359,
    13567,
    13924,
    14396,
    15019,
    15372,
    16292,
    16343,
    18081,
    18670,
    21357,
    22189,
    25749,
    25780,
    26052,
    27878,
    31206,
    31852,
    32019,
    36364,
    42314,
    46827,
    49255,
    49636,
    49640,
    49740,
    49741,
    49742,
    49743,
    49744
  ],
  "torch_dtype": "float32",
  "transformers_version": "4.35.2",
  "use_cache": true,
  "use_weighted_layer_sum": false,
  "vocab_size": 51247
}
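
For reference, a minimal sketch of consuming this file with the Hugging Face transformers library. It assumes the JSON above is saved locally as config.json next to compatible weights; the path and printed values are illustrative, not part of the config itself.

# Minimal sketch, assuming this file is saved locally as config.json.
from transformers import WhisperConfig, WhisperForConditionalGeneration

# Parse the config; WhisperConfig inherits from_json_file from PretrainedConfig.
config = WhisperConfig.from_json_file("config.json")
print(config.model_type, config.d_model, config.vocab_size)  # whisper 1280 51247

# Instantiate a randomly initialized model with this architecture.
# Loading trained weights would instead use
# WhisperForConditionalGeneration.from_pretrained("<checkpoint_dir>").
model = WhisperForConditionalGeneration(config)

Note that recent transformers releases typically read generation settings such as forced_decoder_ids and suppress_tokens from a separate generation_config.json when one is present, falling back to the values in this file otherwise.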