Changed max_position_embeddings to 514 to be used with max_seq_len=512.
Files changed:
- config.json: +2 -2
- pytorch_model.bin: +2 -2
config.json CHANGED
@@ -12,14 +12,14 @@
   "initializer_range": 0.02,
   "intermediate_size": 3072,
   "layer_norm_eps": 1e-12,
-  "max_position_embeddings":
+  "max_position_embeddings": 514,
   "model_type": "roberta",
   "num_attention_heads": 12,
   "num_hidden_layers": 12,
   "pad_token_id": 0,
   "position_embedding_type": "absolute",
   "torch_dtype": "float32",
-  "transformers_version": "4.12.
+  "transformers_version": "4.12.5",
   "type_vocab_size": 2,
   "use_cache": true,
   "vocab_size": 32000
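Why 514 rather than 512: the Hugging Face RoBERTa implementation offsets position ids past the padding index, so a model meant to consume max_seq_len=512 tokens needs a position-embedding table slightly larger than 512 entries. A minimal shape-check sketch, assuming the standard transformers RobertaConfig/RobertaModel API (the model below is randomly initialized, not this checkpoint):

import torch
from transformers import RobertaConfig, RobertaModel

# Mirror the relevant fields from the config.json in this commit.
config = RobertaConfig(
    vocab_size=32000,
    max_position_embeddings=514,  # 512 tokens + room for the position-id offset
    num_attention_heads=12,
    num_hidden_layers=12,
    intermediate_size=3072,
    type_vocab_size=2,
    pad_token_id=0,
)

model = RobertaModel(config)  # random weights; enough to confirm shapes
input_ids = torch.randint(1, config.vocab_size, (1, 512))  # max_seq_len=512
outputs = model(input_ids=input_ids)
print(outputs.last_hidden_state.shape)  # torch.Size([1, 512, 768])

With max_position_embeddings=512, the offset position ids for a full-length sequence would index past the end of the embedding table and raise an index-out-of-range error.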
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:dd1565ea4191a9d617955b40830b18b948d776f71f702303f4a3c1028a603008
+size 442554161
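Since pytorch_model.bin is stored via Git LFS, the pointer above records the sha256 and byte size of the actual weights file. A hedged sketch for verifying a local download against that pointer (the local filename is an assumption):

import hashlib
import os

EXPECTED_SHA256 = "dd1565ea4191a9d617955b40830b18b948d776f71f702303f4a3c1028a603008"
EXPECTED_SIZE = 442554161

path = "pytorch_model.bin"  # hypothetical local path to the downloaded file
assert os.path.getsize(path) == EXPECTED_SIZE

digest = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks to keep memory flat for the ~440 MB file.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert digest.hexdigest() == EXPECTED_SHA256
print("pytorch_model.bin matches the LFS pointer")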