Update config.json (#4)
Update config.json (16906fb23343bb3495c05515ae49ccac5f4b22d4)
Co-authored-by: Sedrick Keh <[email protected]>
- config.json +2 -2
config.json CHANGED
@@ -6,11 +6,11 @@
 "params": null,
 "apply_qk_norm": true,
 "attn_activation": null,
-"attn_name": "
+"attn_name": "torch_attn",
 "attn_seq_scalar": null,
 "attn_seq_scalar_alpha": null,
 "dim": 4096,
-"ffn_type": "
+"ffn_type": "swiglu_torch",
 "model": "open_lm_7b",
 "model_norm": "gain_only_lp_layer_norm",
 "moe_capacity_factor": 1.25,
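
A minimal sketch (not part of this commit) of how the updated config could be checked after pulling the change; it only uses the standard `json` module and assumes `config.json` sits at the repo root as shown above:

```python
import json

# Load the config file touched by this commit.
with open("config.json") as f:
    cfg = json.load(f)

# Values introduced by this diff.
assert cfg["attn_name"] == "torch_attn"
assert cfg["ffn_type"] == "swiglu_torch"

# Unchanged context fields from the same hunk.
print(cfg["model"], cfg["dim"], cfg["model_norm"])  # open_lm_7b 4096 gain_only_lp_layer_norm
```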