yimingzhang committed on
Commit
407cb19
·
verified ·
1 Parent(s): 0a6ca48

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +2 -2
config.json CHANGED
@@ -6,7 +6,7 @@
6
  "attention_dropout": 0.0,
7
  "bos_token_id": 2348,
8
  "dtype": "bfloat16",
9
- "eos_token_id": 2346,
10
  "head_dim": 128,
11
  "hidden_act": "silu",
12
  "hidden_size": 2048,
@@ -48,7 +48,7 @@
48
  "num_attention_heads": 16,
49
  "num_hidden_layers": 28,
50
  "num_key_value_heads": 8,
51
- "pad_token_id": 2349,
52
  "rms_norm_eps": 1e-06,
53
  "rope_scaling": null,
54
  "rope_theta": 1000000,
 
6
  "attention_dropout": 0.0,
7
  "bos_token_id": 2348,
8
  "dtype": "bfloat16",
9
+ "eos_token_id": 2348,
10
  "head_dim": 128,
11
  "hidden_act": "silu",
12
  "hidden_size": 2048,
 
48
  "num_attention_heads": 16,
49
  "num_hidden_layers": 28,
50
  "num_key_value_heads": 8,
51
+ "pad_token_id": 2348,
52
  "rms_norm_eps": 1e-06,
53
  "rope_scaling": null,
54
  "rope_theta": 1000000,