qingy2024 committed · verified
Commit 12d6322 · 1 Parent(s): b83be0b

revert back to old config

Files changed (1): config.json (+4 -12)
config.json CHANGED
@@ -4,6 +4,7 @@
   ],
   "attention_bias": true,
   "attention_dropout": 0.0,
+  "dtype": "bfloat16",
   "eos_token_id": 200002,
   "experts_per_token": 4,
   "head_dim": 64,
@@ -46,16 +47,7 @@
   "num_key_value_heads": 8,
   "num_local_experts": 32,
   "output_router_logits": false,
-  "pad_token_id": 200017,
-  "quantization_config": {
-    "modules_to_not_convert": [
-      "model.layers.*.self_attn",
-      "model.layers.*.mlp.router",
-      "model.embed_tokens",
-      "lm_head"
-    ],
-    "quant_method": "mxfp4"
-  },
+  "pad_token_id": 199999,
   "rms_norm_eps": 1e-05,
   "rope_scaling": {
     "beta_fast": 32.0,
@@ -70,8 +62,8 @@
   "sliding_window": 128,
   "swiglu_limit": 7.0,
   "tie_word_embeddings": false,
-  "transformers_version": "4.55.0",
-  "unsloth_fixed": true,
+  "transformers_version": "4.56.0",
+  "unsloth_version": "2025.9.4",
   "use_cache": true,
   "vocab_size": 201088
 }
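For reference, the removed block told transformers to treat the checkpoint as MXFP4-quantized (`quant_method: "mxfp4"`) while keeping the listed `modules_to_not_convert` (attention, MoE router, embeddings, lm_head) unquantized; after the revert there is no `quantization_config` at all and the new `"dtype": "bfloat16"` entry records the storage dtype. Below is a minimal sketch, not part of the commit, of how the reverted config is read at load time, assuming a transformers >= 4.56 environment (matching the pinned `transformers_version`); the repo id is a hypothetical placeholder.

```python
# Hedged sketch: inspect the reverted config and load the checkpoint.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "user/model"  # placeholder; substitute the actual repository

cfg = AutoConfig.from_pretrained(repo_id)
print(cfg.pad_token_id)  # 199999 after this commit (previously 200017)
# The mxfp4 quantization_config block was removed, so this is now None:
print(getattr(cfg, "quantization_config", None))

# With no quantization_config, the weights load as ordinary tensors.
# `dtype=` is the transformers >= 4.56 spelling of this kwarg
# (older releases call it `torch_dtype`).
model = AutoModelForCausalLM.from_pretrained(repo_id, dtype="bfloat16")
```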