leejunhyeok committed on
Commit
8dbdeff
·
verified ·
1 Parent(s): f76fc65

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -6
config.json CHANGED
@@ -41,10 +41,6 @@
41
  "rms_norm_eps": 1e-06,
42
  "rope_scaling": null,
43
  "rope_theta": 500000.0,
44
- "routed_scaling_factor": null,
45
- "scale_emb": 1,
46
- "scoring_func": null,
47
- "seq_aux": null,
48
  "sliding_window": null,
49
  "tie_word_embeddings": true,
50
  "topk_group": null,
@@ -53,8 +49,6 @@
53
  "transformers_version": "4.46.3",
54
  "use_bias": false,
55
  "use_cache": true,
56
- "use_fused_mlp": null,
57
- "use_mrope": false,
58
  "use_qk_norm": false,
59
  "use_sliding_window": false,
60
  "vocab_size": 219520,
 
41
  "rms_norm_eps": 1e-06,
42
  "rope_scaling": null,
43
  "rope_theta": 500000.0,
 
 
 
 
44
  "sliding_window": null,
45
  "tie_word_embeddings": true,
46
  "topk_group": null,
 
49
  "transformers_version": "4.46.3",
50
  "use_bias": false,
51
  "use_cache": true,
 
 
52
  "use_qk_norm": false,
53
  "use_sliding_window": false,
54
  "vocab_size": 219520,