Change BOS token from 0 to 2, as the BOS token is equal to the EOS token for OPT. See: https://github.com/huggingface/transformers/issues/17431
#1 opened by patrickvonplaten
config.json CHANGED (+1, -1)
@@ -5,7 +5,7 @@
     "OPTForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "bos_token_id": 0,
+  "bos_token_id": 2,
   "hidden_size": 2560,
   "do_layer_norm_before": true,
   "dropout": 0.1,
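
For reference, a minimal sketch of how to verify the fix, assuming this repository is the facebook/opt-2.7b checkpoint (the "hidden_size": 2560 in the diff matches that model). OPT's tokenizer uses "</s>" (id 2) as both its BOS and EOS token, so after this change the config agrees with the tokenizer:

from transformers import AutoConfig, AutoTokenizer

# "facebook/opt-2.7b" is an assumption inferred from hidden_size 2560.
config = AutoConfig.from_pretrained("facebook/opt-2.7b")
tokenizer = AutoTokenizer.from_pretrained("facebook/opt-2.7b")

# OPT's tokenizer uses "</s>" (id 2) for both BOS and EOS.
print(tokenizer.bos_token, tokenizer.bos_token_id)  # </s> 2
print(tokenizer.eos_token, tokenizer.eos_token_id)  # </s> 2

# With this PR applied, the config matches the tokenizer.
assert config.bos_token_id == tokenizer.bos_token_id == 2
assert config.eos_token_id == tokenizer.eos_token_id == 2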