Upload config
config.json CHANGED: +1 -9
@@ -1,9 +1,6 @@
 {
   "activation_dropout": 0.0,
   "activation_function": "gelu",
-  "architectures": [
-    "WhisperModel"
-  ],
   "attention_dropout": 0.0,
   "bos_token_id": 50257,
   "d_model": 768,
@@ -11,11 +8,7 @@
   "decoder_ffn_dim": 3072,
   "decoder_layerdrop": 0.0,
   "decoder_layers": 12,
-  "decoder_start_token_id": [
-    50258,
-    50259,
-    50359
-  ],
+  "decoder_start_token_id": 50257,
   "dropout": 0.0,
   "encoder_attention_heads": 12,
   "encoder_ffn_dim": 3072,
@@ -119,7 +112,6 @@
   "num_mel_bins": 80,
   "pad_token_id": 0,
   "scale_embedding": false,
-  "torch_dtype": "float32",
   "transformers_version": "4.23.0.dev0",
   "use_cache": true,
   "vocab_size": 51865
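For anyone consuming this config, the net effect is that decoder_start_token_id becomes a single integer (50257) instead of a list, and the architectures and torch_dtype entries are removed. Below is a minimal sketch of loading the updated file, assuming transformers >= 4.23 is installed and the committed config.json has been saved to the current directory; the local path is illustrative, not part of the commit.

    from transformers import WhisperConfig

    # Load the updated config; from_pretrained accepts a directory
    # containing a config.json (here: the current working directory).
    config = WhisperConfig.from_pretrained(".")

    # After this commit the field is a plain int, which is the shape
    # generation utilities expect for the first decoder input token.
    print(config.decoder_start_token_id)  # 50257

    # Fields untouched by the diff still load as before.
    print(config.vocab_size)  # 51865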

