Upload config.json

config.json CHANGED (+2 -2)
@@ -6,7 +6,7 @@
   "attention_dropout": 0.0,
   "bos_token_id": 2348,
   "dtype": "bfloat16",
-  "eos_token_id":
+  "eos_token_id": 2348,
   "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 2048,
@@ -48,7 +48,7 @@
   "num_attention_heads": 16,
   "num_hidden_layers": 28,
   "num_key_value_heads": 8,
-  "pad_token_id":
+  "pad_token_id": 2348,
   "rms_norm_eps": 1e-06,
   "rope_scaling": null,
   "rope_theta": 1000000,
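The only substantive change is that eos_token_id and pad_token_id are now set to 2348, the same id the config already uses for bos_token_id. A minimal sketch of checking the edited file, assuming the updated config.json has been downloaded into the working directory (file path and expected values taken from the diff above):

```python
import json

# Load the edited config and confirm the special-token ids touched by this commit.
with open("config.json") as f:
    cfg = json.load(f)

assert cfg["bos_token_id"] == 2348  # unchanged; the new ids match it
assert cfg["eos_token_id"] == 2348  # set in this commit
assert cfg["pad_token_id"] == 2348  # set in this commit

print("special token ids:",
      cfg["bos_token_id"], cfg["eos_token_id"], cfg["pad_token_id"])
```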