Update config.json
config.json +6 -0
config.json CHANGED
@@ -2,6 +2,7 @@
   "_num_labels": 3,
   "activation_dropout": 0.0,
   "activation_function": "gelu",
+  "add_bias_logits": false,
   "add_final_layer_norm": true,
   "attention_dropout": 0.0,
   "bos_token_id": 0,
@@ -17,6 +18,7 @@
   "encoder_layerdrop": 0.0,
   "encoder_layers": 12,
   "eos_token_id": 2,
+  "extra_pos_embeddings": 2,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1",
@@ -29,13 +31,17 @@
     "LABEL_1": 1,
     "LABEL_2": 2
   },
+  "max_length": 100,
+  "decoder_start_token_id": 250020,
   "max_position_embeddings": 1024,
   "model_type": "mbart",
   "normalize_before": true,
+  "normalize_embedding": true,
   "num_beams": 5,
   "num_hidden_layers": 12,
   "output_past": true,
   "pad_token_id": 1,
   "scale_embedding": true,
+  "static_position_embeddings": false,
   "vocab_size": 250027
 }
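A minimal sketch of how the six added keys surface after this commit, assuming the Hugging Face transformers library and a local copy of this checkpoint (the path below is a placeholder, not the actual repo id). `from_pretrained` loads config.json and exposes every key as an attribute on the config object:

```python
from transformers import MBartConfig

# Placeholder path: substitute the actual checkpoint directory or repo id.
config = MBartConfig.from_pretrained("path/to/mbart-checkpoint")

# The six keys added in this commit become plain attributes:
print(config.add_bias_logits)             # False
print(config.extra_pos_embeddings)        # 2
print(config.max_length)                  # 100 -- default generation length
print(config.decoder_start_token_id)      # 250020 -- first decoder input token
print(config.normalize_embedding)         # True
print(config.static_position_embeddings)  # False (learned, not sinusoidal)
```

Note that `max_length: 100` and the pre-existing `num_beams: 5` act as default parameters for `generate()`, so beam search with these settings runs without passing them explicitly. The `decoder_start_token_id` of 250020 sits in the language-code range at the top of mBART's 250027-token vocabulary, presumably selecting the target-language token for this checkpoint.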