NexVeridian committed (verified)
Commit e552f60 · 1 Parent(s): fa26739

Add files using upload-large-folder tool
README.md CHANGED
@@ -12,7 +12,7 @@ base_model: openai/gpt-oss-120b
 
 This model [NexVeridian/gpt-oss-120b-4bit](https://huggingface.co/NexVeridian/gpt-oss-120b-4bit) was
 converted to MLX format from [openai/gpt-oss-120b](https://huggingface.co/openai/gpt-oss-120b)
-using mlx-lm version **0.26.3**.
+using mlx-lm version **0.27.0**.
 
 ## Use with mlx
 
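For context, the "## Use with mlx" section this hunk borders conventionally carries the stock mlx-lm loading snippet, along the lines of the sketch below (assumed boilerplate; the README's exact body is not shown in this diff):

```python
# Standard mlx-lm usage sketch for a converted model (pip install mlx-lm).
from mlx_lm import load, generate

model, tokenizer = load("NexVeridian/gpt-oss-120b-4bit")

prompt = "hello"
# Apply the repo's chat template (the chat_template.jinja updated below).
if tokenizer.chat_template is not None:
    messages = [{"role": "user", "content": prompt}]
    prompt = tokenizer.apply_chat_template(messages, add_generation_prompt=True)

response = generate(model, tokenizer, prompt=prompt, verbose=True)
```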
chat_template.jinja CHANGED
@@ -84,8 +84,7 @@
 
 {%- elif param_spec.type == "object" -%}
 {%- if param_spec.properties -%}
-{{- "{
-" }}
+{{- "{\n" }}
 {%- for prop_name, prop_spec in param_spec.properties.items() -%}
 {{- prop_name -}}
 {%- if prop_name not in (param_spec.required or []) -%}
@@ -107,24 +106,17 @@
 {%- endmacro -%}
 
 {%- macro render_tool_namespace(namespace_name, tools) -%}
-{{- "## " + namespace_name + "
-
-" }}
-{{- "namespace " + namespace_name + " {
-
-" }}
+{{- "## " + namespace_name + "\n\n" }}
+{{- "namespace " + namespace_name + " {\n\n" }}
 {%- for tool in tools %}
 {%- set tool = tool.function %}
-{{- "// " + tool.description + "
-" }}
+{{- "// " + tool.description + "\n" }}
 {{- "type "+ tool.name + " = " }}
 {%- if tool.parameters and tool.parameters.properties %}
-{{- "(_: {
-" }}
+{{- "(_: {\n" }}
 {%- for param_name, param_spec in tool.parameters.properties.items() %}
 {%- if param_spec.description %}
-{{- "// " + param_spec.description + "
-" }}
+{{- "// " + param_spec.description + "\n" }}
 {%- endif %}
 {{- param_name }}
 {%- if param_name not in (tool.parameters.required or []) -%}
@@ -142,20 +134,14 @@
 {%- endif -%}
 {%- endif -%}
 {%- if not loop.last %}
-{{- ",
-" }}
+{{- ",\n" }}
 {%- else %}
-{{- "
-" }}
+{{- ",\n" }}
 {%- endif -%}
 {%- endfor %}
-{{- "}) => any;
-
-" }}
+{{- "}) => any;\n\n" }}
 {%- else -%}
-{{- "() => any;
-
-" }}
+{{- "() => any;\n\n" }}
 {%- endif -%}
 {%- endfor %}
 {{- "} // namespace " + namespace_name }}
@@ -163,92 +149,46 @@
 
 {%- macro render_builtin_tools(browser_tool, python_tool) -%}
 {%- if browser_tool %}
-{{- "## browser
-
-" }}
-{{- "// Tool for browsing.
-" }}
-{{- "// The `cursor` appears in brackets before each browsing display: `[{cursor}]`.
-" }}
-{{- "// Cite information from the tool using the following format:
-" }}
-{{- "// `【{cursor}†L{line_start}(-L{line_end})?】`, for example: `【6†L9-L11】` or `【8†L3】`.
-" }}
-{{- "// Do not quote more than 10 words directly from the tool output.
-" }}
-{{- "// sources=web (default: web)
-" }}
-{{- "namespace browser {
-
-" }}
-{{- "// Searches for information related to `query` and displays `topn` results.
-" }}
-{{- "type search = (_: {
-" }}
-{{- "query: string,
-" }}
-{{- "topn?: number, // default: 10
-" }}
-{{- "source?: string,
-" }}
-{{- "}) => any;
-
-" }}
-{{- "// Opens the link `id` from the page indicated by `cursor` starting at line number `loc`, showing `num_lines` lines.
-" }}
-{{- "// Valid link ids are displayed with the formatting: `【{id}†.*】`.
-" }}
-{{- "// If `cursor` is not provided, the most recent page is implied.
-" }}
-{{- "// If `id` is a string, it is treated as a fully qualified URL associated with `source`.
-" }}
-{{- "// If `loc` is not provided, the viewport will be positioned at the beginning of the document or centered on the most relevant passage, if available.
-" }}
-{{- "// Use this function without `id` to scroll to a new location of an opened page.
-" }}
-{{- "type open = (_: {
-" }}
-{{- "id?: number | string, // default: -1
-" }}
-{{- "cursor?: number, // default: -1
-" }}
-{{- "loc?: number, // default: -1
-" }}
-{{- "num_lines?: number, // default: -1
-" }}
-{{- "view_source?: boolean, // default: false
-" }}
-{{- "source?: string,
-" }}
-{{- "}) => any;
-
-" }}
-{{- "// Finds exact matches of `pattern` in the current page, or the page given by `cursor`.
-" }}
-{{- "type find = (_: {
-" }}
-{{- "pattern: string,
-" }}
-{{- "cursor?: number, // default: -1
-" }}
-{{- "}) => any;
-
-" }}
-{{- "} // namespace browser
-
-" }}
+{{- "## browser\n\n" }}
+{{- "// Tool for browsing.\n" }}
+{{- "// The `cursor` appears in brackets before each browsing display: `[{cursor}]`.\n" }}
+{{- "// Cite information from the tool using the following format:\n" }}
+{{- "// `【{cursor}†L{line_start}(-L{line_end})?】`, for example: `【6†L9-L11】` or `【8†L3】`.\n" }}
+{{- "// Do not quote more than 10 words directly from the tool output.\n" }}
+{{- "// sources=web (default: web)\n" }}
+{{- "namespace browser {\n\n" }}
+{{- "// Searches for information related to `query` and displays `topn` results.\n" }}
+{{- "type search = (_: {\n" }}
+{{- "query: string,\n" }}
+{{- "topn?: number, // default: 10\n" }}
+{{- "source?: string,\n" }}
+{{- "}) => any;\n\n" }}
+{{- "// Opens the link `id` from the page indicated by `cursor` starting at line number `loc`, showing `num_lines` lines.\n" }}
+{{- "// Valid link ids are displayed with the formatting: `【{id}†.*】`.\n" }}
+{{- "// If `cursor` is not provided, the most recent page is implied.\n" }}
+{{- "// If `id` is a string, it is treated as a fully qualified URL associated with `source`.\n" }}
+{{- "// If `loc` is not provided, the viewport will be positioned at the beginning of the document or centered on the most relevant passage, if available.\n" }}
+{{- "// Use this function without `id` to scroll to a new location of an opened page.\n" }}
+{{- "type open = (_: {\n" }}
+{{- "id?: number | string, // default: -1\n" }}
+{{- "cursor?: number, // default: -1\n" }}
+{{- "loc?: number, // default: -1\n" }}
+{{- "num_lines?: number, // default: -1\n" }}
+{{- "view_source?: boolean, // default: false\n" }}
+{{- "source?: string,\n" }}
+{{- "}) => any;\n\n" }}
+{{- "// Finds exact matches of `pattern` in the current page, or the page given by `cursor`.\n" }}
+{{- "type find = (_: {\n" }}
+{{- "pattern: string,\n" }}
+{{- "cursor?: number, // default: -1\n" }}
+{{- "}) => any;\n\n" }}
+{{- "} // namespace browser\n\n" }}
 {%- endif -%}
 
 {%- if python_tool %}
-{{- "## python
-
-" }}
-{{- "Use this tool to execute Python code in your chain of thought. The code will not be shown to the user. This tool should be used for internal reasoning, but not for code that is intended to be visible to the user (e.g. when creating plots, tables, or files).
-
-" }}
-{{- "When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 120.0 seconds. The drive at '/mnt/data' can be used to save and persist user files. Internet access for this session is UNKNOWN. Depends on the cluster.
-
-" }}
+{{- "## python\n\n" }}
+{{- "Use this tool to execute Python code in your chain of thought. The code will not be shown to the user. This tool should be used for internal reasoning, but not for code that is intended to be visible to the user (e.g. when creating plots, tables, or files).\n\n" }}
+{{- "When you send a message containing Python code to python, it will be executed in a stateful Jupyter notebook environment. python will respond with the output of the execution or time out after 120.0 seconds. The drive at '/mnt/data' can be used to save and persist user files. Internet access for this session is UNKNOWN. Depends on the cluster.\n\n" }}
 {%- endif -%}
 {%- endmacro -%}
 
@@ -257,23 +197,15 @@
 {%- if model_identity is not defined %}
 {%- set model_identity = "You are ChatGPT, a large language model trained by OpenAI." %}
 {%- endif %}
-{{- model_identity + "
-" }}
-{{- "Knowledge cutoff: 2024-06
-" }}
-{{- "Current date: " + strftime_now("%Y-%m-%d") + "
-
-" }}
+{{- model_identity + "\n" }}
+{{- "Knowledge cutoff: 2024-06\n" }}
+{{- "Current date: " + strftime_now("%Y-%m-%d") + "\n\n" }}
 {%- if reasoning_effort is not defined %}
 {%- set reasoning_effort = "medium" %}
 {%- endif %}
-{{- "Reasoning: " + reasoning_effort + "
-
-" }}
+{{- "Reasoning: " + reasoning_effort + "\n\n" }}
 {%- if builtin_tools %}
-{{- "# Tools
-
-" }}
+{{- "# Tools\n\n" }}
 {%- set available_builtin_tools = namespace(browser=false, python=false) %}
 {%- for tool in builtin_tools %}
 {%- if tool == "browser" %}
@@ -286,8 +218,7 @@
 {%- endif -%}
 {{- "# Valid channels: analysis, commentary, final. Channel must be included for every message." }}
 {%- if tools -%}
-{{- "
-Calls to these tools must go to the commentary channel: 'functions'." }}
+{{- "\nCalls to these tools must go to the commentary channel: 'functions'." }}
 {%- endif -%}
 {%- endmacro -%}
 
@@ -312,18 +243,12 @@ Calls to these tools must go to the commentary channel: 'functions'." }}
 {%- if developer_message or tools %}
 {{- "<|start|>developer<|message|>" }}
 {%- if developer_message %}
-{{- "# Instructions
-
-" }}
+{{- "# Instructions\n\n" }}
 {{- developer_message }}
+{{- "\n\n" }}
 {%- endif %}
 {%- if tools -%}
-{{- "
-
-" }}
-{{- "# Tools
-
-" }}
+{{- "# Tools\n\n" }}
 {{- render_tool_namespace("functions", tools) }}
 {%- endif -%}
 {{- "<|end|>" }}
@@ -346,6 +271,15 @@ Calls to these tools must go to the commentary channel: 'functions'." }}
 {%- endif %}
 {%- endif %}
 {%- if "tool_calls" in message %}
+{#- We need very careful handling here - we want to drop the tool call analysis message if the model #}
+{#- has output a later <|final|> message, but otherwise we want to retain it. This is the only case #}
+{#- when we render CoT/analysis messages in inference. #}
+{%- set future_final_message = namespace(found=false) %}
+{%- for future_message in loop_messages[loop.index:] %}
+{%- if future_message.role == 'assistant' and "tool_calls" not in future_message %}
+{%- set future_final_message.found = true %}
+{%- endif %}
+{%- endfor %}
 {#- We assume max 1 tool call per message, and so we infer the tool call name #}
 {#- in "tool" messages from the most recent assistant tool call name #}
 {%- set tool_call = message.tool_calls[0] %}
@@ -354,9 +288,9 @@ Calls to these tools must go to the commentary channel: 'functions'." }}
 {%- endif %}
 {%- if message.content and message.thinking %}
 {{- raise_exception("Cannot pass both content and thinking in an assistant message with tool calls! Put the analysis message in one or the other, but not both.") }}
-{%- elif message.content %}
+{%- elif message.content and not future_final_message.found %}
 {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.content + "<|end|>" }}
-{%- elif message.thinking %}
+{%- elif message.thinking and not future_final_message.found %}
 {{- "<|start|>assistant<|channel|>analysis<|message|>" + message.thinking + "<|end|>" }}
 {%- endif %}
 {{- "<|start|>assistant to=" }}
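The `future_final_message` scan added above changes when chain-of-thought is re-rendered at inference time: the analysis text attached to a tool call is kept only while no later assistant message without tool calls (i.e. no final answer) exists. A minimal stand-alone sketch of that look-ahead in Python, using hypothetical message dicts rather than the template engine itself:

```python
# Sketch of the look-ahead the new template performs for each assistant
# tool-call message: its analysis/thinking text is dropped as soon as a
# later assistant message without tool calls (a final answer) exists.
def keep_analysis(messages: list[dict], i: int) -> bool:
    """True if the analysis text of the tool-call message at index i
    should still be rendered into the prompt."""
    for future in messages[i + 1:]:
        if future["role"] == "assistant" and "tool_calls" not in future:
            return False  # a final answer follows: drop the CoT
    return True

msgs = [
    {"role": "user", "content": "What's the weather?"},
    {"role": "assistant", "thinking": "need a tool", "tool_calls": [{"name": "get_weather"}]},
    {"role": "tool", "content": "sunny"},
    {"role": "assistant", "content": "It's sunny."},  # final, no tool calls
]
assert keep_analysis(msgs, 1) is False  # CoT dropped once a final answer exists
```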
config.json CHANGED
@@ -60,12 +60,1762 @@
 "output_router_logits": false,
 "pad_token_id": 199999,
 "quantization": {
-  "group_size": 64,
-  "bits": 4
+  "group_size": 32,
+  "bits": 4,
+  "mode": "mxfp4",
+  "model.embed_tokens": {
+    "group_size": 64,
+    "bits": 4,
+    "mode": "affine"
+  },
+  "model.layers.0.self_attn.q_proj": {
+    "group_size": 64,
+    "bits": 4,
+    "mode": "affine"
+  },
+  "model.layers.0.self_attn.k_proj": {
+    "group_size": 64,
+    "bits": 4,
+    "mode": "affine"
+  },
+  "model.layers.0.self_attn.v_proj": {
+    "group_size": 64,
+    "bits": 4,
+    "mode": "affine"
+  },
+  "model.layers.0.self_attn.o_proj": {
+    "group_size": 64,
+    "bits": 4,
+    "mode": "affine"
+  },
+  "model.layers.0.mlp.router": {
+    "group_size": 64,
+    "bits": 8
+  },
+  [... the same five overrides (self_attn.q_proj/k_proj/v_proj/o_proj at "group_size": 64, "bits": 4, "mode": "affine"; mlp.router at "group_size": 64, "bits": 8) repeat verbatim for model.layers.1 through model.layers.35 ...]
+  "lm_head": {
+    "group_size": 64,
+    "bits": 4,
+    "mode": "affine"
+  }
 },
 "quantization_config": {
-  "group_size": 64,
-  "bits": 4
+  [... identical contents to the "quantization" block above: the mxfp4 base ("group_size": 32, "bits": 4) plus the same per-module overrides through "lm_head" ...]
 },
 "rms_norm_eps": 1e-05,
 "rope_scaling": {
1818
+ }
1819
  },
1820
  "rms_norm_eps": 1e-05,
1821
  "rope_scaling": {
generation_config.json CHANGED
@@ -3,7 +3,8 @@
  "do_sample": true,
  "eos_token_id": [
  200002,
- 199999
+ 199999,
+ 200012
  ],
  "pad_token_id": 199999,
  "transformers_version": "4.55.0.dev0"
model-00001-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6d61e9c197a7f18a3f78e8c5ddc86a3d3cb2299ec61cee8fc6550ed3142c8208
- size 5154528757
+ oid sha256:fd358642c5c70080f15ae4dc4850d13e282be56ee2cb81cd7e47b2ddb37ce5ae
+ size 4889475597
model-00002-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:585d025ddebfa4d8da9cb776a192cda24027f99479fdbcc55e7321a2d0ba5ef2
- size 5359821538
+ oid sha256:59bc7abcaca624b31a08e062799142e8553b915911ff097295d6c8577036a7b6
+ size 5128867805
model-00003-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:a88a694c0eed9f63516bde70351186bed511e452e8774f3f6c0a160e985855cc
- size 4896072625
+ oid sha256:764b6a85f3aebda5a289fb3291c43cc2402ff7e8f1514cbf2f00e78b2deda216
+ size 5128867817
model-00004-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:68985d43180d29f8d64fc28e4eca8779e3bd67f941b8d33b876c504720993265
- size 5359821403
+ oid sha256:a4e4d2a84ad98021d777f53d4717eaec17338ea5079361197221164c61c199ca
+ size 5128867806
model-00005-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:9e9672b3a40c18c2300bb0f3e9bc803e68bd80f98583e556405a00d72d4ac3e5
- size 4896072747
+ oid sha256:48971dbba87d315c9af96143f879b674329edf65526f3db41f0a3f854b4d6ba2
+ size 5128867937
model-00006-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c5e2991df6b66c07270b19eadf7a79f4d6949e0ee37d33338978aec664b68124
- size 5344653819
+ oid sha256:975f0a09177a331a08411a72bf80fd345f59e0c5de6ca2c86a8fe1249f76a2d5
+ size 5128867857
model-00007-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ca428d9a23994c50f35965d60bc487325315dcd8188b3f36ad5af5ff1e88f3fa
- size 4896072771
+ oid sha256:1699fcaaebbf59a72e6c4b85b760ac6e27d1c26d6d235a555d46195e1574fee3
+ size 5128867933
model-00008-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:0a11ee61e016e3bd55ad88cb25c16a6a7d8f59877abed2d49d042327f38b76de
- size 5359821670
+ oid sha256:207457e810f88fe7285104104a58c1b5b1571f3dbd0c45add65b5a979595f5eb
+ size 5128867907
model-00009-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6d6b302051d253b106778d38e2d1943fe93f95578f4cafdf33e7eb5c0e337b80
- size 4896072725
+ oid sha256:901125482399ebb09a78767b7b6d1b05549a5b8ee6482a25684283745773ca90
+ size 5128867905
model-00010-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:6891dae45de1ac295332ae17e444116d272196c5a715ddabc8b604cb21cd361f
- size 5359821628
+ oid sha256:f39dc4bd4780653eb4a874a9cf23477cb123cd7798c367142c81be40dd2be4a6
+ size 5128867921
model-00011-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:c185ad8907180b5d6cb387918ed75bf4886563c6099a364d83a92ce49a1694bb
- size 4896072681
+ oid sha256:c44627235138c1553e1036d9043b492a065be604fc22c5d845b16d90e69ba4ee
+ size 5128867937
model-00012-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:ab2fc1e6a7ef9f3a4e954ada02caefb7e079f2f55362e75299152abcbb7ef5d0
- size 5344653887
+ oid sha256:b2fe971dce8a424b9687fc79ca1332616e1bfb5f6d6f77bf6fb115767a63a186
+ size 5128867921
model-00013-of-00013.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:f99813a191248a830e460c88957909e5b310d32efe8e1147522e0fc7b5b9a718
- size 4011018459
+ oid sha256:cc4057ba3da05c4e0d33e3435b19a12dacefb6fe222c7e95c7aa83dab35f28af
+ size 890923814
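Each `.safetensors` entry above is a Git LFS pointer swap: the tracked text file only records the new blob's `oid` (a SHA-256 digest) and `size`. For anyone mirroring the repo manually, a pointer can be checked against a downloaded shard with the stdlib alone; the file paths below are placeholders.

```python
import hashlib
from pathlib import Path

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """Compare a local blob against the oid/size recorded in an LFS pointer file."""
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].removeprefix("sha256:")
    if Path(blob_path).stat().st_size != int(fields["size"]):
        return False
    digest = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_oid

# e.g. verify_lfs_pointer("model-00013-of-00013.safetensors.pointer",
#                         "model-00013-of-00013.safetensors")
```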
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
  {
  "metadata": {
- "total_size": 65774358144,
  "total_parameters": 116829154368
  },
  "weight_map": {
@@ -12,15 +12,12 @@
  "model.embed_tokens.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.input_layernorm.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.down_proj.bias": "model-00001-of-00013.safetensors",
- "model.layers.0.mlp.experts.down_proj.biases": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.down_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.down_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.gate_proj.bias": "model-00001-of-00013.safetensors",
- "model.layers.0.mlp.experts.gate_proj.biases": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.gate_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.gate_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.up_proj.bias": "model-00001-of-00013.safetensors",
- "model.layers.0.mlp.experts.up_proj.biases": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.up_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.up_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.router.bias": "model-00001-of-00013.safetensors",
@@ -47,15 +44,12 @@
  "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.input_layernorm.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.down_proj.bias": "model-00001-of-00013.safetensors",
- "model.layers.1.mlp.experts.down_proj.biases": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.down_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.down_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.gate_proj.bias": "model-00001-of-00013.safetensors",
- "model.layers.1.mlp.experts.gate_proj.biases": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.gate_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.gate_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.up_proj.bias": "model-00001-of-00013.safetensors",
- "model.layers.1.mlp.experts.up_proj.biases": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.up_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.up_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.router.bias": "model-00001-of-00013.safetensors",
@@ -82,15 +76,12 @@
  "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.10.input_layernorm.weight": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.down_proj.bias": "model-00004-of-00013.safetensors",
- "model.layers.10.mlp.experts.down_proj.biases": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.down_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.down_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.gate_proj.bias": "model-00004-of-00013.safetensors",
- "model.layers.10.mlp.experts.gate_proj.biases": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.gate_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.gate_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.up_proj.bias": "model-00004-of-00013.safetensors",
- "model.layers.10.mlp.experts.up_proj.biases": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.up_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.up_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.router.bias": "model-00004-of-00013.safetensors",
@@ -117,17 +108,14 @@
  "model.layers.10.self_attn.v_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.11.input_layernorm.weight": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.experts.down_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.11.mlp.experts.down_proj.biases": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.experts.down_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.experts.down_proj.weight": "model-00005-of-00013.safetensors",
- "model.layers.11.mlp.experts.gate_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.11.mlp.experts.gate_proj.biases": "model-00005-of-00013.safetensors",
- "model.layers.11.mlp.experts.gate_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.experts.gate_proj.weight": "model-00004-of-00013.safetensors",
- "model.layers.11.mlp.experts.up_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.11.mlp.experts.up_proj.biases": "model-00005-of-00013.safetensors",
- "model.layers.11.mlp.experts.up_proj.scales": "model-00005-of-00013.safetensors",
- "model.layers.11.mlp.experts.up_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.router.bias": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.router.biases": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.router.scales": "model-00005-of-00013.safetensors",
@@ -152,15 +140,12 @@
  "model.layers.11.self_attn.v_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.12.input_layernorm.weight": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.down_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.12.mlp.experts.down_proj.biases": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.down_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.down_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.gate_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.12.mlp.experts.gate_proj.biases": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.gate_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.gate_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.up_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.12.mlp.experts.up_proj.biases": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.up_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.up_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.router.bias": "model-00005-of-00013.safetensors",
@@ -187,15 +172,12 @@
  "model.layers.12.self_attn.v_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.input_layernorm.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.down_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.13.mlp.experts.down_proj.biases": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.down_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.down_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.gate_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.13.mlp.experts.gate_proj.biases": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.gate_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.gate_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.up_proj.bias": "model-00005-of-00013.safetensors",
- "model.layers.13.mlp.experts.up_proj.biases": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.up_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.up_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.router.bias": "model-00005-of-00013.safetensors",
@@ -222,17 +204,14 @@
  "model.layers.13.self_attn.v_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.14.input_layernorm.weight": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.experts.down_proj.bias": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.down_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.experts.down_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.experts.down_proj.weight": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.gate_proj.bias": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.gate_proj.biases": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.gate_proj.scales": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.gate_proj.weight": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.up_proj.bias": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.up_proj.biases": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.up_proj.scales": "model-00006-of-00013.safetensors",
- "model.layers.14.mlp.experts.up_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.router.bias": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.router.biases": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.router.scales": "model-00006-of-00013.safetensors",
@@ -257,15 +236,12 @@
  "model.layers.14.self_attn.v_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.15.input_layernorm.weight": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.down_proj.bias": "model-00006-of-00013.safetensors",
- "model.layers.15.mlp.experts.down_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.down_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.down_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.gate_proj.bias": "model-00006-of-00013.safetensors",
- "model.layers.15.mlp.experts.gate_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.gate_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.gate_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.up_proj.bias": "model-00006-of-00013.safetensors",
- "model.layers.15.mlp.experts.up_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.up_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.up_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.router.bias": "model-00006-of-00013.safetensors",
@@ -290,24 +266,21 @@
  "model.layers.15.self_attn.v_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.15.self_attn.v_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.15.self_attn.v_proj.weight": "model-00006-of-00013.safetensors",
- "model.layers.16.input_layernorm.weight": "model-00007-of-00013.safetensors",
- "model.layers.16.mlp.experts.down_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.16.mlp.experts.down_proj.biases": "model-00007-of-00013.safetensors",
- "model.layers.16.mlp.experts.down_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.16.mlp.experts.down_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.gate_proj.bias": "model-00006-of-00013.safetensors",
- "model.layers.16.mlp.experts.gate_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.gate_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.gate_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.up_proj.bias": "model-00006-of-00013.safetensors",
- "model.layers.16.mlp.experts.up_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.up_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.up_proj.weight": "model-00006-of-00013.safetensors",
- "model.layers.16.mlp.router.bias": "model-00007-of-00013.safetensors",
- "model.layers.16.mlp.router.biases": "model-00007-of-00013.safetensors",
- "model.layers.16.mlp.router.scales": "model-00007-of-00013.safetensors",
- "model.layers.16.mlp.router.weight": "model-00007-of-00013.safetensors",
- "model.layers.16.post_attention_layernorm.weight": "model-00007-of-00013.safetensors",
  "model.layers.16.self_attn.k_proj.bias": "model-00006-of-00013.safetensors",
  "model.layers.16.self_attn.k_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.16.self_attn.k_proj.scales": "model-00006-of-00013.safetensors",
@@ -327,50 +300,44 @@
  "model.layers.16.self_attn.v_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.17.input_layernorm.weight": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.experts.down_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.down_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.experts.down_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.experts.down_proj.weight": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.gate_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.gate_proj.biases": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.gate_proj.scales": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.gate_proj.weight": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.up_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.up_proj.biases": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.up_proj.scales": "model-00007-of-00013.safetensors",
- "model.layers.17.mlp.experts.up_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.router.bias": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.router.biases": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.router.scales": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.router.weight": "model-00007-of-00013.safetensors",
  "model.layers.17.post_attention_layernorm.weight": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.k_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.k_proj.biases": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.k_proj.scales": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.k_proj.weight": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.o_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.o_proj.biases": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.o_proj.scales": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.o_proj.weight": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.q_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.q_proj.biases": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.q_proj.scales": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.q_proj.weight": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.sinks": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.v_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.v_proj.biases": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.v_proj.scales": "model-00007-of-00013.safetensors",
- "model.layers.17.self_attn.v_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.input_layernorm.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.down_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.18.mlp.experts.down_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.down_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.down_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.gate_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.18.mlp.experts.gate_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.gate_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.gate_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.up_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.18.mlp.experts.up_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.up_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.up_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.router.bias": "model-00007-of-00013.safetensors",
@@ -395,24 +362,21 @@
  "model.layers.18.self_attn.v_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.18.self_attn.v_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.18.self_attn.v_proj.weight": "model-00007-of-00013.safetensors",
- "model.layers.19.input_layernorm.weight": "model-00008-of-00013.safetensors",
- "model.layers.19.mlp.experts.down_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.19.mlp.experts.down_proj.biases": "model-00008-of-00013.safetensors",
- "model.layers.19.mlp.experts.down_proj.scales": "model-00008-of-00013.safetensors",
- "model.layers.19.mlp.experts.down_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.19.mlp.experts.gate_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.19.mlp.experts.gate_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.19.mlp.experts.gate_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.19.mlp.experts.gate_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.19.mlp.experts.up_proj.bias": "model-00007-of-00013.safetensors",
- "model.layers.19.mlp.experts.up_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.19.mlp.experts.up_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.19.mlp.experts.up_proj.weight": "model-00007-of-00013.safetensors",
- "model.layers.19.mlp.router.bias": "model-00008-of-00013.safetensors",
- "model.layers.19.mlp.router.biases": "model-00008-of-00013.safetensors",
- "model.layers.19.mlp.router.scales": "model-00008-of-00013.safetensors",
- "model.layers.19.mlp.router.weight": "model-00008-of-00013.safetensors",
- "model.layers.19.post_attention_layernorm.weight": "model-00008-of-00013.safetensors",
  "model.layers.19.self_attn.k_proj.bias": "model-00007-of-00013.safetensors",
  "model.layers.19.self_attn.k_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.19.self_attn.k_proj.scales": "model-00007-of-00013.safetensors",
@@ -432,15 +396,12 @@
  "model.layers.19.self_attn.v_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.2.input_layernorm.weight": "model-00002-of-00013.safetensors",
  "model.layers.2.mlp.experts.down_proj.bias": "model-00002-of-00013.safetensors",
- "model.layers.2.mlp.experts.down_proj.biases": "model-00002-of-00013.safetensors",
  "model.layers.2.mlp.experts.down_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.2.mlp.experts.down_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.2.mlp.experts.gate_proj.bias": "model-00001-of-00013.safetensors",
- "model.layers.2.mlp.experts.gate_proj.biases": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.experts.gate_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.experts.gate_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.experts.up_proj.bias": "model-00001-of-00013.safetensors",
- "model.layers.2.mlp.experts.up_proj.biases": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.experts.up_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.experts.up_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.router.bias": "model-00002-of-00013.safetensors",
@@ -467,50 +428,44 @@
  "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.20.input_layernorm.weight": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.experts.down_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.down_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.experts.down_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.experts.down_proj.weight": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.gate_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.gate_proj.biases": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.gate_proj.scales": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.gate_proj.weight": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.up_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.up_proj.biases": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.up_proj.scales": "model-00008-of-00013.safetensors",
- "model.layers.20.mlp.experts.up_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.router.bias": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.router.biases": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.router.scales": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.router.weight": "model-00008-of-00013.safetensors",
  "model.layers.20.post_attention_layernorm.weight": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.k_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.k_proj.biases": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.k_proj.scales": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.k_proj.weight": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.o_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.o_proj.biases": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.o_proj.scales": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.o_proj.weight": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.q_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.q_proj.biases": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.q_proj.scales": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.q_proj.weight": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.sinks": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.v_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.v_proj.biases": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.v_proj.scales": "model-00008-of-00013.safetensors",
- "model.layers.20.self_attn.v_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.input_layernorm.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.down_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.21.mlp.experts.down_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.down_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.down_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.gate_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.21.mlp.experts.gate_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.gate_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.gate_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.up_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.21.mlp.experts.up_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.up_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.up_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.router.bias": "model-00008-of-00013.safetensors",
@@ -535,24 +490,21 @@
  "model.layers.21.self_attn.v_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.21.self_attn.v_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.21.self_attn.v_proj.weight": "model-00008-of-00013.safetensors",
- "model.layers.22.input_layernorm.weight": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.experts.down_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.experts.down_proj.biases": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.experts.down_proj.scales": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.experts.down_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.22.mlp.experts.gate_proj.bias": "model-00008-of-00013.safetensors",
- "model.layers.22.mlp.experts.gate_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.22.mlp.experts.gate_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.22.mlp.experts.gate_proj.weight": "model-00008-of-00013.safetensors",
- "model.layers.22.mlp.experts.up_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.experts.up_proj.biases": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.experts.up_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.22.mlp.experts.up_proj.weight": "model-00008-of-00013.safetensors",
- "model.layers.22.mlp.router.bias": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.router.biases": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.router.scales": "model-00009-of-00013.safetensors",
- "model.layers.22.mlp.router.weight": "model-00009-of-00013.safetensors",
- "model.layers.22.post_attention_layernorm.weight": "model-00009-of-00013.safetensors",
  "model.layers.22.self_attn.k_proj.bias": "model-00008-of-00013.safetensors",
  "model.layers.22.self_attn.k_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.22.self_attn.k_proj.scales": "model-00008-of-00013.safetensors",
@@ -572,50 +524,44 @@
  "model.layers.22.self_attn.v_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.23.input_layernorm.weight": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.experts.down_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.down_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.experts.down_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.experts.down_proj.weight": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.gate_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.gate_proj.biases": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.gate_proj.scales": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.gate_proj.weight": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.up_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.up_proj.biases": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.up_proj.scales": "model-00009-of-00013.safetensors",
- "model.layers.23.mlp.experts.up_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.router.bias": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.router.biases": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.router.scales": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.router.weight": "model-00009-of-00013.safetensors",
  "model.layers.23.post_attention_layernorm.weight": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.k_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.k_proj.biases": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.k_proj.scales": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.k_proj.weight": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.o_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.o_proj.biases": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.o_proj.scales": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.o_proj.weight": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.q_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.q_proj.biases": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.q_proj.scales": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.q_proj.weight": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.sinks": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.v_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.v_proj.biases": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.v_proj.scales": "model-00009-of-00013.safetensors",
- "model.layers.23.self_attn.v_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.input_layernorm.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.down_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.24.mlp.experts.down_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.down_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.down_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.gate_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.24.mlp.experts.gate_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.gate_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.gate_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.up_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.24.mlp.experts.up_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.up_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.up_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.router.bias": "model-00009-of-00013.safetensors",
@@ -640,24 +586,21 @@
  "model.layers.24.self_attn.v_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.24.self_attn.v_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.24.self_attn.v_proj.weight": "model-00009-of-00013.safetensors",
- "model.layers.25.input_layernorm.weight": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.experts.down_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.experts.down_proj.biases": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.experts.down_proj.scales": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.experts.down_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.25.mlp.experts.gate_proj.bias": "model-00009-of-00013.safetensors",
- "model.layers.25.mlp.experts.gate_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.25.mlp.experts.gate_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.25.mlp.experts.gate_proj.weight": "model-00009-of-00013.safetensors",
- "model.layers.25.mlp.experts.up_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.experts.up_proj.biases": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.experts.up_proj.scales": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.experts.up_proj.weight": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.router.bias": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.router.biases": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.router.scales": "model-00010-of-00013.safetensors",
- "model.layers.25.mlp.router.weight": "model-00010-of-00013.safetensors",
- "model.layers.25.post_attention_layernorm.weight": "model-00010-of-00013.safetensors",
  "model.layers.25.self_attn.k_proj.bias": "model-00009-of-00013.safetensors",
  "model.layers.25.self_attn.k_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.25.self_attn.k_proj.scales": "model-00009-of-00013.safetensors",
@@ -677,50 +620,44 @@
  "model.layers.25.self_attn.v_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.26.input_layernorm.weight": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.experts.down_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.down_proj.biases": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.experts.down_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.experts.down_proj.weight": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.gate_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.gate_proj.biases": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.gate_proj.scales": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.gate_proj.weight": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.up_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.up_proj.biases": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.up_proj.scales": "model-00010-of-00013.safetensors",
- "model.layers.26.mlp.experts.up_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.router.bias": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.router.biases": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.router.scales": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.router.weight": "model-00010-of-00013.safetensors",
  "model.layers.26.post_attention_layernorm.weight": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.k_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.k_proj.biases": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.k_proj.scales": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.k_proj.weight": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.o_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.o_proj.biases": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.o_proj.scales": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.o_proj.weight": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.q_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.q_proj.biases": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.q_proj.scales": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.q_proj.weight": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.sinks": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.v_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.v_proj.biases": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.v_proj.scales": "model-00010-of-00013.safetensors",
- "model.layers.26.self_attn.v_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.input_layernorm.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.down_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.27.mlp.experts.down_proj.biases": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.down_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.down_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.gate_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.27.mlp.experts.gate_proj.biases": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.gate_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.gate_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.up_proj.bias": "model-00010-of-00013.safetensors",
- "model.layers.27.mlp.experts.up_proj.biases": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.up_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.up_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.router.bias": "model-00010-of-00013.safetensors",
@@ -745,24 +682,21 @@
  "model.layers.27.self_attn.v_proj.biases": "model-00010-of-00013.safetensors",
  "model.layers.27.self_attn.v_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.27.self_attn.v_proj.weight": "model-00010-of-00013.safetensors",
- "model.layers.28.input_layernorm.weight": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.down_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.down_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.down_proj.scales": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.down_proj.weight": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.gate_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.gate_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.gate_proj.scales": "model-00011-of-00013.safetensors",
  "model.layers.28.mlp.experts.gate_proj.weight": "model-00010-of-00013.safetensors",
- "model.layers.28.mlp.experts.up_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.up_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.up_proj.scales": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.experts.up_proj.weight": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.router.bias": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.router.biases": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.router.scales": "model-00011-of-00013.safetensors",
- "model.layers.28.mlp.router.weight": "model-00011-of-00013.safetensors",
- "model.layers.28.post_attention_layernorm.weight": "model-00011-of-00013.safetensors",
  "model.layers.28.self_attn.k_proj.bias": "model-00010-of-00013.safetensors",
  "model.layers.28.self_attn.k_proj.biases": "model-00010-of-00013.safetensors",
  "model.layers.28.self_attn.k_proj.scales": "model-00010-of-00013.safetensors",
@@ -782,50 +716,44 @@
  "model.layers.28.self_attn.v_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.29.input_layernorm.weight": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.experts.down_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.down_proj.biases": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.experts.down_proj.scales": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.experts.down_proj.weight": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.gate_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.gate_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.gate_proj.scales": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.gate_proj.weight": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.up_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.up_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.up_proj.scales": "model-00011-of-00013.safetensors",
- "model.layers.29.mlp.experts.up_proj.weight": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.router.bias": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.router.biases": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.router.scales": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.router.weight": "model-00011-of-00013.safetensors",
  "model.layers.29.post_attention_layernorm.weight": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.k_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.k_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.k_proj.scales": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.k_proj.weight": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.o_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.o_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.o_proj.scales": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.o_proj.weight": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.q_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.q_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.q_proj.scales": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.q_proj.weight": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.sinks": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.v_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.v_proj.biases": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.v_proj.scales": "model-00011-of-00013.safetensors",
- "model.layers.29.self_attn.v_proj.weight": "model-00011-of-00013.safetensors",
  "model.layers.3.input_layernorm.weight": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.down_proj.bias": "model-00002-of-00013.safetensors",
- "model.layers.3.mlp.experts.down_proj.biases": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.down_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.down_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.gate_proj.bias": "model-00002-of-00013.safetensors",
- "model.layers.3.mlp.experts.gate_proj.biases": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.gate_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.gate_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.up_proj.bias": "model-00002-of-00013.safetensors",
- "model.layers.3.mlp.experts.up_proj.biases": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.up_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.experts.up_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.3.mlp.router.bias": "model-00002-of-00013.safetensors",
@@ -852,15 +780,12 @@
  "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.30.input_layernorm.weight": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.down_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.30.mlp.experts.down_proj.biases": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.down_proj.scales": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.down_proj.weight": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.gate_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.30.mlp.experts.gate_proj.biases": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.gate_proj.scales": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.gate_proj.weight": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.up_proj.bias": "model-00011-of-00013.safetensors",
- "model.layers.30.mlp.experts.up_proj.biases": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.up_proj.scales": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.experts.up_proj.weight": "model-00011-of-00013.safetensors",
  "model.layers.30.mlp.router.bias": "model-00011-of-00013.safetensors",
@@ -885,24 +810,21 @@
  "model.layers.30.self_attn.v_proj.biases": "model-00011-of-00013.safetensors",
  "model.layers.30.self_attn.v_proj.scales": "model-00011-of-00013.safetensors",
  "model.layers.30.self_attn.v_proj.weight": "model-00011-of-00013.safetensors",
- "model.layers.31.input_layernorm.weight": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.down_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.down_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.down_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.down_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.gate_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.gate_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.gate_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.gate_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.up_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.up_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.up_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.experts.up_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.router.bias": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.router.biases": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.router.scales": "model-00012-of-00013.safetensors",
- "model.layers.31.mlp.router.weight": "model-00012-of-00013.safetensors",
- "model.layers.31.post_attention_layernorm.weight": "model-00012-of-00013.safetensors",
  "model.layers.31.self_attn.k_proj.bias": "model-00011-of-00013.safetensors",
  "model.layers.31.self_attn.k_proj.biases": "model-00011-of-00013.safetensors",
  "model.layers.31.self_attn.k_proj.scales": "model-00011-of-00013.safetensors",
@@ -922,57 +844,51 @@
  "model.layers.31.self_attn.v_proj.weight": "model-00011-of-00013.safetensors",
  "model.layers.32.input_layernorm.weight": "model-00012-of-00013.safetensors",
  "model.layers.32.mlp.experts.down_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.down_proj.biases": "model-00012-of-00013.safetensors",
  "model.layers.32.mlp.experts.down_proj.scales": "model-00012-of-00013.safetensors",
  "model.layers.32.mlp.experts.down_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.gate_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.gate_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.gate_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.gate_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.up_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.up_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.up_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.32.mlp.experts.up_proj.weight": "model-00012-of-00013.safetensors",
  "model.layers.32.mlp.router.bias": "model-00012-of-00013.safetensors",
  "model.layers.32.mlp.router.biases": "model-00012-of-00013.safetensors",
  "model.layers.32.mlp.router.scales": "model-00012-of-00013.safetensors",
  "model.layers.32.mlp.router.weight": "model-00012-of-00013.safetensors",
  "model.layers.32.post_attention_layernorm.weight": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.k_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.k_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.k_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.k_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.o_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.o_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.o_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.o_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.q_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.q_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.q_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.q_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.sinks": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.v_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.v_proj.biases": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.v_proj.scales": "model-00012-of-00013.safetensors",
- "model.layers.32.self_attn.v_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.33.input_layernorm.weight": "model-00013-of-00013.safetensors",
- "model.layers.33.mlp.experts.down_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.33.mlp.experts.down_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.33.mlp.experts.down_proj.scales": "model-00013-of-00013.safetensors",
  "model.layers.33.mlp.experts.down_proj.weight": "model-00012-of-00013.safetensors",
  "model.layers.33.mlp.experts.gate_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.33.mlp.experts.gate_proj.biases": "model-00012-of-00013.safetensors",
  "model.layers.33.mlp.experts.gate_proj.scales": "model-00012-of-00013.safetensors",
  "model.layers.33.mlp.experts.gate_proj.weight": "model-00012-of-00013.safetensors",
  "model.layers.33.mlp.experts.up_proj.bias": "model-00012-of-00013.safetensors",
- "model.layers.33.mlp.experts.up_proj.biases": "model-00012-of-00013.safetensors",
  "model.layers.33.mlp.experts.up_proj.scales": "model-00012-of-00013.safetensors",
  "model.layers.33.mlp.experts.up_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.33.mlp.router.bias": "model-00013-of-00013.safetensors",
- "model.layers.33.mlp.router.biases": "model-00013-of-00013.safetensors",
- "model.layers.33.mlp.router.scales": "model-00013-of-00013.safetensors",
- "model.layers.33.mlp.router.weight": "model-00013-of-00013.safetensors",
- "model.layers.33.post_attention_layernorm.weight": "model-00013-of-00013.safetensors",
  "model.layers.33.self_attn.k_proj.bias": "model-00012-of-00013.safetensors",
  "model.layers.33.self_attn.k_proj.biases": "model-00012-of-00013.safetensors",
  "model.layers.33.self_attn.k_proj.scales": "model-00012-of-00013.safetensors",
@@ -990,87 +906,78 @@
  "model.layers.33.self_attn.v_proj.biases": "model-00012-of-00013.safetensors",
  "model.layers.33.self_attn.v_proj.scales": "model-00012-of-00013.safetensors",
  "model.layers.33.self_attn.v_proj.weight": "model-00012-of-00013.safetensors",
- "model.layers.34.input_layernorm.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.down_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.down_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.down_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.down_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.gate_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.gate_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.gate_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.gate_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.up_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.up_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.up_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.experts.up_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.router.bias": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.router.biases": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.router.scales": "model-00013-of-00013.safetensors",
- "model.layers.34.mlp.router.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.post_attention_layernorm.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.k_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.k_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.k_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.k_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.o_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.o_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.o_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.o_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.q_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.q_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.q_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.q_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.sinks": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.v_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.v_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.v_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.34.self_attn.v_proj.weight": "model-00013-of-00013.safetensors",
  "model.layers.35.input_layernorm.weight": "model-00013-of-00013.safetensors",
  "model.layers.35.mlp.experts.down_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.down_proj.biases": "model-00013-of-00013.safetensors",
  "model.layers.35.mlp.experts.down_proj.scales": "model-00013-of-00013.safetensors",
  "model.layers.35.mlp.experts.down_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.gate_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.gate_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.gate_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.gate_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.up_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.up_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.up_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.35.mlp.experts.up_proj.weight": "model-00013-of-00013.safetensors",
  "model.layers.35.mlp.router.bias": "model-00013-of-00013.safetensors",
  "model.layers.35.mlp.router.biases": "model-00013-of-00013.safetensors",
  "model.layers.35.mlp.router.scales": "model-00013-of-00013.safetensors",
  "model.layers.35.mlp.router.weight": "model-00013-of-00013.safetensors",
  "model.layers.35.post_attention_layernorm.weight": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.k_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.k_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.k_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.k_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.o_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.o_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.o_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.o_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.q_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.q_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.q_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.q_proj.weight": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.sinks": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.v_proj.bias": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.v_proj.biases": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.v_proj.scales": "model-00013-of-00013.safetensors",
- "model.layers.35.self_attn.v_proj.weight": "model-00013-of-00013.safetensors",
  "model.layers.4.input_layernorm.weight": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.down_proj.bias": "model-00002-of-00013.safetensors",
- "model.layers.4.mlp.experts.down_proj.biases": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.down_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.down_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.gate_proj.bias": "model-00002-of-00013.safetensors",
- "model.layers.4.mlp.experts.gate_proj.biases": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.gate_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.gate_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.up_proj.bias": "model-00002-of-00013.safetensors",
- "model.layers.4.mlp.experts.up_proj.biases": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.up_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.experts.up_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.4.mlp.router.bias": "model-00002-of-00013.safetensors",
@@ -1097,16 +1004,13 @@
  "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.5.input_layernorm.weight": "model-00003-of-00013.safetensors",
  "model.layers.5.mlp.experts.down_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.5.mlp.experts.down_proj.biases": "model-00003-of-00013.safetensors",
  "model.layers.5.mlp.experts.down_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.5.mlp.experts.down_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.5.mlp.experts.gate_proj.bias": "model-00002-of-00013.safetensors",
- "model.layers.5.mlp.experts.gate_proj.biases": "model-00002-of-00013.safetensors",
  "model.layers.5.mlp.experts.gate_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.5.mlp.experts.gate_proj.weight": "model-00002-of-00013.safetensors",
- "model.layers.5.mlp.experts.up_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.5.mlp.experts.up_proj.biases": "model-00003-of-00013.safetensors",
- "model.layers.5.mlp.experts.up_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.5.mlp.experts.up_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.5.mlp.router.bias": "model-00003-of-00013.safetensors",
  "model.layers.5.mlp.router.biases": "model-00003-of-00013.safetensors",
@@ -1132,15 +1036,12 @@
  "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.6.input_layernorm.weight": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.down_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.6.mlp.experts.down_proj.biases": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.down_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.down_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.gate_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.6.mlp.experts.gate_proj.biases": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.gate_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.gate_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.up_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.6.mlp.experts.up_proj.biases": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.up_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.experts.up_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.6.mlp.router.bias": "model-00003-of-00013.safetensors",
@@ -1167,15 +1068,12 @@
  "model.layers.6.self_attn.v_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.7.input_layernorm.weight": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.down_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.7.mlp.experts.down_proj.biases": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.down_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.down_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.gate_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.7.mlp.experts.gate_proj.biases": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.gate_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.gate_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.up_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.7.mlp.experts.up_proj.biases": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.up_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.experts.up_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.7.mlp.router.bias": "model-00003-of-00013.safetensors",
@@ -1202,17 +1100,14 @@
  "model.layers.7.self_attn.v_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.8.input_layernorm.weight": "model-00004-of-00013.safetensors",
  "model.layers.8.mlp.experts.down_proj.bias": "model-00004-of-00013.safetensors",
- "model.layers.8.mlp.experts.down_proj.biases": "model-00004-of-00013.safetensors",
  "model.layers.8.mlp.experts.down_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.8.mlp.experts.down_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.8.mlp.experts.gate_proj.bias": "model-00003-of-00013.safetensors",
- "model.layers.8.mlp.experts.gate_proj.biases": "model-00003-of-00013.safetensors",
  "model.layers.8.mlp.experts.gate_proj.scales": "model-00003-of-00013.safetensors",
  "model.layers.8.mlp.experts.gate_proj.weight": "model-00003-of-00013.safetensors",
- "model.layers.8.mlp.experts.up_proj.bias": "model-00004-of-00013.safetensors",
- "model.layers.8.mlp.experts.up_proj.biases": "model-00004-of-00013.safetensors",
- "model.layers.8.mlp.experts.up_proj.scales": "model-00004-of-00013.safetensors",
- "model.layers.8.mlp.experts.up_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.8.mlp.router.bias": "model-00004-of-00013.safetensors",
  "model.layers.8.mlp.router.biases": "model-00004-of-00013.safetensors",
  "model.layers.8.mlp.router.scales": "model-00004-of-00013.safetensors",
@@ -1237,15 +1132,12 @@
  "model.layers.8.self_attn.v_proj.weight": "model-00003-of-00013.safetensors",
  "model.layers.9.input_layernorm.weight": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.down_proj.bias": "model-00004-of-00013.safetensors",
- "model.layers.9.mlp.experts.down_proj.biases": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.down_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.down_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.gate_proj.bias": "model-00004-of-00013.safetensors",
- "model.layers.9.mlp.experts.gate_proj.biases": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.gate_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.gate_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.up_proj.bias": "model-00004-of-00013.safetensors",
- "model.layers.9.mlp.experts.up_proj.biases": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.up_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.experts.up_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.9.mlp.router.bias": "model-00004-of-00013.safetensors",
 
  {
  "metadata": {
+ "total_size": 62197812864,
  "total_parameters": 116829154368
  },
  "weight_map": {

  "model.embed_tokens.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.input_layernorm.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.down_proj.bias": "model-00001-of-00013.safetensors",

  "model.layers.0.mlp.experts.down_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.down_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.gate_proj.bias": "model-00001-of-00013.safetensors",

  "model.layers.0.mlp.experts.gate_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.gate_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.up_proj.bias": "model-00001-of-00013.safetensors",

  "model.layers.0.mlp.experts.up_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.experts.up_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.0.mlp.router.bias": "model-00001-of-00013.safetensors",

  "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.input_layernorm.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.down_proj.bias": "model-00001-of-00013.safetensors",

  "model.layers.1.mlp.experts.down_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.down_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.gate_proj.bias": "model-00001-of-00013.safetensors",

  "model.layers.1.mlp.experts.gate_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.gate_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.up_proj.bias": "model-00001-of-00013.safetensors",

  "model.layers.1.mlp.experts.up_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.experts.up_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.1.mlp.router.bias": "model-00001-of-00013.safetensors",

  "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.10.input_layernorm.weight": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.down_proj.bias": "model-00004-of-00013.safetensors",

  "model.layers.10.mlp.experts.down_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.down_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.gate_proj.bias": "model-00004-of-00013.safetensors",

  "model.layers.10.mlp.experts.gate_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.gate_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.up_proj.bias": "model-00004-of-00013.safetensors",

  "model.layers.10.mlp.experts.up_proj.scales": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.experts.up_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.10.mlp.router.bias": "model-00004-of-00013.safetensors",

  "model.layers.10.self_attn.v_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.11.input_layernorm.weight": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.experts.down_proj.bias": "model-00005-of-00013.safetensors",

  "model.layers.11.mlp.experts.down_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.experts.down_proj.weight": "model-00005-of-00013.safetensors",
+ "model.layers.11.mlp.experts.gate_proj.bias": "model-00004-of-00013.safetensors",
+ "model.layers.11.mlp.experts.gate_proj.scales": "model-00004-of-00013.safetensors",

  "model.layers.11.mlp.experts.gate_proj.weight": "model-00004-of-00013.safetensors",
+ "model.layers.11.mlp.experts.up_proj.bias": "model-00004-of-00013.safetensors",
+ "model.layers.11.mlp.experts.up_proj.scales": "model-00004-of-00013.safetensors",
+ "model.layers.11.mlp.experts.up_proj.weight": "model-00004-of-00013.safetensors",

  "model.layers.11.mlp.router.bias": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.router.biases": "model-00005-of-00013.safetensors",
  "model.layers.11.mlp.router.scales": "model-00005-of-00013.safetensors",

  "model.layers.11.self_attn.v_proj.weight": "model-00004-of-00013.safetensors",
  "model.layers.12.input_layernorm.weight": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.down_proj.bias": "model-00005-of-00013.safetensors",

  "model.layers.12.mlp.experts.down_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.down_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.gate_proj.bias": "model-00005-of-00013.safetensors",

  "model.layers.12.mlp.experts.gate_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.gate_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.up_proj.bias": "model-00005-of-00013.safetensors",

  "model.layers.12.mlp.experts.up_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.experts.up_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.12.mlp.router.bias": "model-00005-of-00013.safetensors",

  "model.layers.12.self_attn.v_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.input_layernorm.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.down_proj.bias": "model-00005-of-00013.safetensors",

  "model.layers.13.mlp.experts.down_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.down_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.gate_proj.bias": "model-00005-of-00013.safetensors",

  "model.layers.13.mlp.experts.gate_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.gate_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.up_proj.bias": "model-00005-of-00013.safetensors",

  "model.layers.13.mlp.experts.up_proj.scales": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.experts.up_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.13.mlp.router.bias": "model-00005-of-00013.safetensors",

  "model.layers.13.self_attn.v_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.14.input_layernorm.weight": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.experts.down_proj.bias": "model-00006-of-00013.safetensors",

  "model.layers.14.mlp.experts.down_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.experts.down_proj.weight": "model-00006-of-00013.safetensors",
+ "model.layers.14.mlp.experts.gate_proj.bias": "model-00005-of-00013.safetensors",
+ "model.layers.14.mlp.experts.gate_proj.scales": "model-00005-of-00013.safetensors",
+ "model.layers.14.mlp.experts.gate_proj.weight": "model-00005-of-00013.safetensors",
+ "model.layers.14.mlp.experts.up_proj.bias": "model-00005-of-00013.safetensors",
+ "model.layers.14.mlp.experts.up_proj.scales": "model-00005-of-00013.safetensors",
+ "model.layers.14.mlp.experts.up_proj.weight": "model-00005-of-00013.safetensors",
 
 
  "model.layers.14.mlp.router.bias": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.router.biases": "model-00006-of-00013.safetensors",
  "model.layers.14.mlp.router.scales": "model-00006-of-00013.safetensors",

  "model.layers.14.self_attn.v_proj.weight": "model-00005-of-00013.safetensors",
  "model.layers.15.input_layernorm.weight": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.down_proj.bias": "model-00006-of-00013.safetensors",

  "model.layers.15.mlp.experts.down_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.down_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.gate_proj.bias": "model-00006-of-00013.safetensors",

  "model.layers.15.mlp.experts.gate_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.gate_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.up_proj.bias": "model-00006-of-00013.safetensors",

  "model.layers.15.mlp.experts.up_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.experts.up_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.15.mlp.router.bias": "model-00006-of-00013.safetensors",

  "model.layers.15.self_attn.v_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.15.self_attn.v_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.15.self_attn.v_proj.weight": "model-00006-of-00013.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00006-of-00013.safetensors",
+ "model.layers.16.mlp.experts.down_proj.bias": "model-00006-of-00013.safetensors",
+ "model.layers.16.mlp.experts.down_proj.scales": "model-00006-of-00013.safetensors",

  "model.layers.16.mlp.experts.down_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.gate_proj.bias": "model-00006-of-00013.safetensors",

  "model.layers.16.mlp.experts.gate_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.gate_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.up_proj.bias": "model-00006-of-00013.safetensors",

  "model.layers.16.mlp.experts.up_proj.scales": "model-00006-of-00013.safetensors",
  "model.layers.16.mlp.experts.up_proj.weight": "model-00006-of-00013.safetensors",
+ "model.layers.16.mlp.router.bias": "model-00006-of-00013.safetensors",
+ "model.layers.16.mlp.router.biases": "model-00006-of-00013.safetensors",
+ "model.layers.16.mlp.router.scales": "model-00006-of-00013.safetensors",
+ "model.layers.16.mlp.router.weight": "model-00006-of-00013.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00006-of-00013.safetensors",
  "model.layers.16.self_attn.k_proj.bias": "model-00006-of-00013.safetensors",
  "model.layers.16.self_attn.k_proj.biases": "model-00006-of-00013.safetensors",
  "model.layers.16.self_attn.k_proj.scales": "model-00006-of-00013.safetensors",

  "model.layers.16.self_attn.v_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.17.input_layernorm.weight": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.experts.down_proj.bias": "model-00007-of-00013.safetensors",

  "model.layers.17.mlp.experts.down_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.experts.down_proj.weight": "model-00007-of-00013.safetensors",
+ "model.layers.17.mlp.experts.gate_proj.bias": "model-00006-of-00013.safetensors",
+ "model.layers.17.mlp.experts.gate_proj.scales": "model-00006-of-00013.safetensors",
+ "model.layers.17.mlp.experts.gate_proj.weight": "model-00006-of-00013.safetensors",
+ "model.layers.17.mlp.experts.up_proj.bias": "model-00006-of-00013.safetensors",
+ "model.layers.17.mlp.experts.up_proj.scales": "model-00006-of-00013.safetensors",
+ "model.layers.17.mlp.experts.up_proj.weight": "model-00006-of-00013.safetensors",
 
 
  "model.layers.17.mlp.router.bias": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.router.biases": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.router.scales": "model-00007-of-00013.safetensors",
  "model.layers.17.mlp.router.weight": "model-00007-of-00013.safetensors",
  "model.layers.17.post_attention_layernorm.weight": "model-00007-of-00013.safetensors",
+ "model.layers.17.self_attn.k_proj.bias": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.k_proj.biases": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.k_proj.scales": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.o_proj.bias": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.o_proj.biases": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.o_proj.scales": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.q_proj.bias": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.q_proj.biases": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.q_proj.scales": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.sinks": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.v_proj.bias": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.v_proj.biases": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.v_proj.scales": "model-00006-of-00013.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00006-of-00013.safetensors",
  "model.layers.18.input_layernorm.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.down_proj.bias": "model-00007-of-00013.safetensors",

  "model.layers.18.mlp.experts.down_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.down_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.gate_proj.bias": "model-00007-of-00013.safetensors",

  "model.layers.18.mlp.experts.gate_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.gate_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.up_proj.bias": "model-00007-of-00013.safetensors",

  "model.layers.18.mlp.experts.up_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.experts.up_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.18.mlp.router.bias": "model-00007-of-00013.safetensors",

  "model.layers.18.self_attn.v_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.18.self_attn.v_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.18.self_attn.v_proj.weight": "model-00007-of-00013.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00007-of-00013.safetensors",
+ "model.layers.19.mlp.experts.down_proj.bias": "model-00007-of-00013.safetensors",
+ "model.layers.19.mlp.experts.down_proj.scales": "model-00007-of-00013.safetensors",
+ "model.layers.19.mlp.experts.down_proj.weight": "model-00007-of-00013.safetensors",

  "model.layers.19.mlp.experts.gate_proj.bias": "model-00007-of-00013.safetensors",

  "model.layers.19.mlp.experts.gate_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.19.mlp.experts.gate_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.19.mlp.experts.up_proj.bias": "model-00007-of-00013.safetensors",

  "model.layers.19.mlp.experts.up_proj.scales": "model-00007-of-00013.safetensors",
  "model.layers.19.mlp.experts.up_proj.weight": "model-00007-of-00013.safetensors",
+ "model.layers.19.mlp.router.bias": "model-00007-of-00013.safetensors",
+ "model.layers.19.mlp.router.biases": "model-00007-of-00013.safetensors",
+ "model.layers.19.mlp.router.scales": "model-00007-of-00013.safetensors",
+ "model.layers.19.mlp.router.weight": "model-00007-of-00013.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00007-of-00013.safetensors",
  "model.layers.19.self_attn.k_proj.bias": "model-00007-of-00013.safetensors",
  "model.layers.19.self_attn.k_proj.biases": "model-00007-of-00013.safetensors",
  "model.layers.19.self_attn.k_proj.scales": "model-00007-of-00013.safetensors",

  "model.layers.19.self_attn.v_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.2.input_layernorm.weight": "model-00002-of-00013.safetensors",
  "model.layers.2.mlp.experts.down_proj.bias": "model-00002-of-00013.safetensors",

  "model.layers.2.mlp.experts.down_proj.scales": "model-00002-of-00013.safetensors",
  "model.layers.2.mlp.experts.down_proj.weight": "model-00002-of-00013.safetensors",
  "model.layers.2.mlp.experts.gate_proj.bias": "model-00001-of-00013.safetensors",

  "model.layers.2.mlp.experts.gate_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.experts.gate_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.experts.up_proj.bias": "model-00001-of-00013.safetensors",

  "model.layers.2.mlp.experts.up_proj.scales": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.experts.up_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.2.mlp.router.bias": "model-00002-of-00013.safetensors",

  "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00013.safetensors",
  "model.layers.20.input_layernorm.weight": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.experts.down_proj.bias": "model-00008-of-00013.safetensors",

  "model.layers.20.mlp.experts.down_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.experts.down_proj.weight": "model-00008-of-00013.safetensors",
+ "model.layers.20.mlp.experts.gate_proj.bias": "model-00007-of-00013.safetensors",
+ "model.layers.20.mlp.experts.gate_proj.scales": "model-00007-of-00013.safetensors",
+ "model.layers.20.mlp.experts.gate_proj.weight": "model-00007-of-00013.safetensors",
+ "model.layers.20.mlp.experts.up_proj.bias": "model-00007-of-00013.safetensors",
+ "model.layers.20.mlp.experts.up_proj.scales": "model-00007-of-00013.safetensors",
+ "model.layers.20.mlp.experts.up_proj.weight": "model-00007-of-00013.safetensors",
 
 
  "model.layers.20.mlp.router.bias": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.router.biases": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.router.scales": "model-00008-of-00013.safetensors",
  "model.layers.20.mlp.router.weight": "model-00008-of-00013.safetensors",
  "model.layers.20.post_attention_layernorm.weight": "model-00008-of-00013.safetensors",
+ "model.layers.20.self_attn.k_proj.bias": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.k_proj.biases": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.k_proj.scales": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.o_proj.bias": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.o_proj.biases": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.o_proj.scales": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.q_proj.bias": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.q_proj.biases": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.q_proj.scales": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.sinks": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.v_proj.bias": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.v_proj.biases": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.v_proj.scales": "model-00007-of-00013.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00007-of-00013.safetensors",
  "model.layers.21.input_layernorm.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.down_proj.bias": "model-00008-of-00013.safetensors",

  "model.layers.21.mlp.experts.down_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.down_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.gate_proj.bias": "model-00008-of-00013.safetensors",

  "model.layers.21.mlp.experts.gate_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.gate_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.up_proj.bias": "model-00008-of-00013.safetensors",

  "model.layers.21.mlp.experts.up_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.experts.up_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.21.mlp.router.bias": "model-00008-of-00013.safetensors",

  "model.layers.21.self_attn.v_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.21.self_attn.v_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.21.self_attn.v_proj.weight": "model-00008-of-00013.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.experts.down_proj.bias": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.experts.down_proj.scales": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.experts.down_proj.weight": "model-00008-of-00013.safetensors",

  "model.layers.22.mlp.experts.gate_proj.bias": "model-00008-of-00013.safetensors",

  "model.layers.22.mlp.experts.gate_proj.scales": "model-00008-of-00013.safetensors",
  "model.layers.22.mlp.experts.gate_proj.weight": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.experts.up_proj.bias": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.experts.up_proj.scales": "model-00008-of-00013.safetensors",

  "model.layers.22.mlp.experts.up_proj.weight": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.router.bias": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.router.biases": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.router.scales": "model-00008-of-00013.safetensors",
+ "model.layers.22.mlp.router.weight": "model-00008-of-00013.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00008-of-00013.safetensors",
  "model.layers.22.self_attn.k_proj.bias": "model-00008-of-00013.safetensors",
  "model.layers.22.self_attn.k_proj.biases": "model-00008-of-00013.safetensors",
  "model.layers.22.self_attn.k_proj.scales": "model-00008-of-00013.safetensors",

  "model.layers.22.self_attn.v_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.23.input_layernorm.weight": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.experts.down_proj.bias": "model-00009-of-00013.safetensors",

  "model.layers.23.mlp.experts.down_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.experts.down_proj.weight": "model-00009-of-00013.safetensors",
+ "model.layers.23.mlp.experts.gate_proj.bias": "model-00008-of-00013.safetensors",
+ "model.layers.23.mlp.experts.gate_proj.scales": "model-00008-of-00013.safetensors",
+ "model.layers.23.mlp.experts.gate_proj.weight": "model-00008-of-00013.safetensors",
+ "model.layers.23.mlp.experts.up_proj.bias": "model-00008-of-00013.safetensors",
+ "model.layers.23.mlp.experts.up_proj.scales": "model-00008-of-00013.safetensors",
+ "model.layers.23.mlp.experts.up_proj.weight": "model-00008-of-00013.safetensors",
 
 
  "model.layers.23.mlp.router.bias": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.router.biases": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.router.scales": "model-00009-of-00013.safetensors",
  "model.layers.23.mlp.router.weight": "model-00009-of-00013.safetensors",
  "model.layers.23.post_attention_layernorm.weight": "model-00009-of-00013.safetensors",
+ "model.layers.23.self_attn.k_proj.bias": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.k_proj.biases": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.k_proj.scales": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.o_proj.bias": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.o_proj.biases": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.o_proj.scales": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.q_proj.bias": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.q_proj.biases": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.q_proj.scales": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.sinks": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.v_proj.bias": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.v_proj.biases": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.v_proj.scales": "model-00008-of-00013.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00008-of-00013.safetensors",
  "model.layers.24.input_layernorm.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.down_proj.bias": "model-00009-of-00013.safetensors",

  "model.layers.24.mlp.experts.down_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.down_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.gate_proj.bias": "model-00009-of-00013.safetensors",

  "model.layers.24.mlp.experts.gate_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.gate_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.up_proj.bias": "model-00009-of-00013.safetensors",

  "model.layers.24.mlp.experts.up_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.experts.up_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.24.mlp.router.bias": "model-00009-of-00013.safetensors",

  "model.layers.24.self_attn.v_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.24.self_attn.v_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.24.self_attn.v_proj.weight": "model-00009-of-00013.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.experts.down_proj.bias": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.experts.down_proj.scales": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.experts.down_proj.weight": "model-00009-of-00013.safetensors",

  "model.layers.25.mlp.experts.gate_proj.bias": "model-00009-of-00013.safetensors",

  "model.layers.25.mlp.experts.gate_proj.scales": "model-00009-of-00013.safetensors",
  "model.layers.25.mlp.experts.gate_proj.weight": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.experts.up_proj.bias": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.experts.up_proj.scales": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.experts.up_proj.weight": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.router.bias": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.router.biases": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.router.scales": "model-00009-of-00013.safetensors",
+ "model.layers.25.mlp.router.weight": "model-00009-of-00013.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00009-of-00013.safetensors",

  "model.layers.25.self_attn.k_proj.bias": "model-00009-of-00013.safetensors",
  "model.layers.25.self_attn.k_proj.biases": "model-00009-of-00013.safetensors",
  "model.layers.25.self_attn.k_proj.scales": "model-00009-of-00013.safetensors",

  "model.layers.25.self_attn.v_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.26.input_layernorm.weight": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.experts.down_proj.bias": "model-00010-of-00013.safetensors",

  "model.layers.26.mlp.experts.down_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.experts.down_proj.weight": "model-00010-of-00013.safetensors",
+ "model.layers.26.mlp.experts.gate_proj.bias": "model-00009-of-00013.safetensors",
+ "model.layers.26.mlp.experts.gate_proj.scales": "model-00009-of-00013.safetensors",
+ "model.layers.26.mlp.experts.gate_proj.weight": "model-00009-of-00013.safetensors",
+ "model.layers.26.mlp.experts.up_proj.bias": "model-00009-of-00013.safetensors",
+ "model.layers.26.mlp.experts.up_proj.scales": "model-00009-of-00013.safetensors",
+ "model.layers.26.mlp.experts.up_proj.weight": "model-00009-of-00013.safetensors",
 
 
  "model.layers.26.mlp.router.bias": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.router.biases": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.router.scales": "model-00010-of-00013.safetensors",
  "model.layers.26.mlp.router.weight": "model-00010-of-00013.safetensors",
  "model.layers.26.post_attention_layernorm.weight": "model-00010-of-00013.safetensors",
+ "model.layers.26.self_attn.k_proj.bias": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.k_proj.biases": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.k_proj.scales": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.o_proj.bias": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.o_proj.biases": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.o_proj.scales": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.q_proj.bias": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.q_proj.biases": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.q_proj.scales": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.sinks": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.v_proj.bias": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.v_proj.biases": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.v_proj.scales": "model-00009-of-00013.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00009-of-00013.safetensors",
  "model.layers.27.input_layernorm.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.down_proj.bias": "model-00010-of-00013.safetensors",

  "model.layers.27.mlp.experts.down_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.down_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.gate_proj.bias": "model-00010-of-00013.safetensors",

  "model.layers.27.mlp.experts.gate_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.gate_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.up_proj.bias": "model-00010-of-00013.safetensors",

  "model.layers.27.mlp.experts.up_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.experts.up_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.27.mlp.router.bias": "model-00010-of-00013.safetensors",

  "model.layers.27.self_attn.v_proj.biases": "model-00010-of-00013.safetensors",
  "model.layers.27.self_attn.v_proj.scales": "model-00010-of-00013.safetensors",
  "model.layers.27.self_attn.v_proj.weight": "model-00010-of-00013.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.experts.down_proj.bias": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.experts.down_proj.scales": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.experts.down_proj.weight": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.experts.gate_proj.bias": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.experts.gate_proj.scales": "model-00010-of-00013.safetensors",
 
 
  "model.layers.28.mlp.experts.gate_proj.weight": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.experts.up_proj.bias": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.experts.up_proj.scales": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.experts.up_proj.weight": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.router.bias": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.router.biases": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.router.scales": "model-00010-of-00013.safetensors",
+ "model.layers.28.mlp.router.weight": "model-00010-of-00013.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00010-of-00013.safetensors",

  "model.layers.28.self_attn.k_proj.bias": "model-00010-of-00013.safetensors",
  "model.layers.28.self_attn.k_proj.biases": "model-00010-of-00013.safetensors",
  "model.layers.28.self_attn.k_proj.scales": "model-00010-of-00013.safetensors",

  "model.layers.28.self_attn.v_proj.weight": "model-00010-of-00013.safetensors",
  "model.layers.29.input_layernorm.weight": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.experts.down_proj.bias": "model-00011-of-00013.safetensors",

  "model.layers.29.mlp.experts.down_proj.scales": "model-00011-of-00013.safetensors",
  "model.layers.29.mlp.experts.down_proj.weight": "model-00011-of-00013.safetensors",
+ "model.layers.29.mlp.experts.gate_proj.bias": "model-00010-of-00013.safetensors",
+ "model.layers.29.mlp.experts.gate_proj.scales": "model-00010-of-00013.safetensors",
+ "model.layers.29.mlp.experts.gate_proj.weight": "model-00010-of-00013.safetensors",
+ "model.layers.29.mlp.experts.up_proj.bias": "model-00010-of-00013.safetensors",
+ "model.layers.29.mlp.experts.up_proj.scales": "model-00010-of-00013.safetensors",
+ "model.layers.29.mlp.experts.up_proj.weight": "model-00010-of-00013.safetensors",
 
 
727
  "model.layers.29.mlp.router.bias": "model-00011-of-00013.safetensors",
728
  "model.layers.29.mlp.router.biases": "model-00011-of-00013.safetensors",
729
  "model.layers.29.mlp.router.scales": "model-00011-of-00013.safetensors",
730
  "model.layers.29.mlp.router.weight": "model-00011-of-00013.safetensors",
731
  "model.layers.29.post_attention_layernorm.weight": "model-00011-of-00013.safetensors",
732
+ "model.layers.29.self_attn.k_proj.bias": "model-00010-of-00013.safetensors",
733
+ "model.layers.29.self_attn.k_proj.biases": "model-00010-of-00013.safetensors",
734
+ "model.layers.29.self_attn.k_proj.scales": "model-00010-of-00013.safetensors",
735
+ "model.layers.29.self_attn.k_proj.weight": "model-00010-of-00013.safetensors",
736
+ "model.layers.29.self_attn.o_proj.bias": "model-00010-of-00013.safetensors",
737
+ "model.layers.29.self_attn.o_proj.biases": "model-00010-of-00013.safetensors",
738
+ "model.layers.29.self_attn.o_proj.scales": "model-00010-of-00013.safetensors",
739
+ "model.layers.29.self_attn.o_proj.weight": "model-00010-of-00013.safetensors",
740
+ "model.layers.29.self_attn.q_proj.bias": "model-00010-of-00013.safetensors",
741
+ "model.layers.29.self_attn.q_proj.biases": "model-00010-of-00013.safetensors",
742
+ "model.layers.29.self_attn.q_proj.scales": "model-00010-of-00013.safetensors",
743
+ "model.layers.29.self_attn.q_proj.weight": "model-00010-of-00013.safetensors",
744
+ "model.layers.29.self_attn.sinks": "model-00010-of-00013.safetensors",
745
+ "model.layers.29.self_attn.v_proj.bias": "model-00010-of-00013.safetensors",
746
+ "model.layers.29.self_attn.v_proj.biases": "model-00010-of-00013.safetensors",
747
+ "model.layers.29.self_attn.v_proj.scales": "model-00010-of-00013.safetensors",
748
+ "model.layers.29.self_attn.v_proj.weight": "model-00010-of-00013.safetensors",
749
  "model.layers.3.input_layernorm.weight": "model-00002-of-00013.safetensors",
750
  "model.layers.3.mlp.experts.down_proj.bias": "model-00002-of-00013.safetensors",
 
751
  "model.layers.3.mlp.experts.down_proj.scales": "model-00002-of-00013.safetensors",
752
  "model.layers.3.mlp.experts.down_proj.weight": "model-00002-of-00013.safetensors",
753
  "model.layers.3.mlp.experts.gate_proj.bias": "model-00002-of-00013.safetensors",
 
754
  "model.layers.3.mlp.experts.gate_proj.scales": "model-00002-of-00013.safetensors",
755
  "model.layers.3.mlp.experts.gate_proj.weight": "model-00002-of-00013.safetensors",
756
  "model.layers.3.mlp.experts.up_proj.bias": "model-00002-of-00013.safetensors",
 
757
  "model.layers.3.mlp.experts.up_proj.scales": "model-00002-of-00013.safetensors",
758
  "model.layers.3.mlp.experts.up_proj.weight": "model-00002-of-00013.safetensors",
759
  "model.layers.3.mlp.router.bias": "model-00002-of-00013.safetensors",
 
780
  "model.layers.3.self_attn.v_proj.weight": "model-00002-of-00013.safetensors",
781
  "model.layers.30.input_layernorm.weight": "model-00011-of-00013.safetensors",
782
  "model.layers.30.mlp.experts.down_proj.bias": "model-00011-of-00013.safetensors",
 
783
  "model.layers.30.mlp.experts.down_proj.scales": "model-00011-of-00013.safetensors",
784
  "model.layers.30.mlp.experts.down_proj.weight": "model-00011-of-00013.safetensors",
785
  "model.layers.30.mlp.experts.gate_proj.bias": "model-00011-of-00013.safetensors",
 
786
  "model.layers.30.mlp.experts.gate_proj.scales": "model-00011-of-00013.safetensors",
787
  "model.layers.30.mlp.experts.gate_proj.weight": "model-00011-of-00013.safetensors",
788
  "model.layers.30.mlp.experts.up_proj.bias": "model-00011-of-00013.safetensors",
 
789
  "model.layers.30.mlp.experts.up_proj.scales": "model-00011-of-00013.safetensors",
790
  "model.layers.30.mlp.experts.up_proj.weight": "model-00011-of-00013.safetensors",
791
  "model.layers.30.mlp.router.bias": "model-00011-of-00013.safetensors",
 
810
  "model.layers.30.self_attn.v_proj.biases": "model-00011-of-00013.safetensors",
811
  "model.layers.30.self_attn.v_proj.scales": "model-00011-of-00013.safetensors",
812
  "model.layers.30.self_attn.v_proj.weight": "model-00011-of-00013.safetensors",
813
+ "model.layers.31.input_layernorm.weight": "model-00011-of-00013.safetensors",
814
+ "model.layers.31.mlp.experts.down_proj.bias": "model-00011-of-00013.safetensors",
815
+ "model.layers.31.mlp.experts.down_proj.scales": "model-00011-of-00013.safetensors",
816
+ "model.layers.31.mlp.experts.down_proj.weight": "model-00011-of-00013.safetensors",
817
+ "model.layers.31.mlp.experts.gate_proj.bias": "model-00011-of-00013.safetensors",
818
+ "model.layers.31.mlp.experts.gate_proj.scales": "model-00011-of-00013.safetensors",
819
+ "model.layers.31.mlp.experts.gate_proj.weight": "model-00011-of-00013.safetensors",
820
+ "model.layers.31.mlp.experts.up_proj.bias": "model-00011-of-00013.safetensors",
821
+ "model.layers.31.mlp.experts.up_proj.scales": "model-00011-of-00013.safetensors",
822
+ "model.layers.31.mlp.experts.up_proj.weight": "model-00011-of-00013.safetensors",
823
+ "model.layers.31.mlp.router.bias": "model-00011-of-00013.safetensors",
824
+ "model.layers.31.mlp.router.biases": "model-00011-of-00013.safetensors",
825
+ "model.layers.31.mlp.router.scales": "model-00011-of-00013.safetensors",
826
+ "model.layers.31.mlp.router.weight": "model-00011-of-00013.safetensors",
827
+ "model.layers.31.post_attention_layernorm.weight": "model-00011-of-00013.safetensors",
 
 
 
828
  "model.layers.31.self_attn.k_proj.bias": "model-00011-of-00013.safetensors",
829
  "model.layers.31.self_attn.k_proj.biases": "model-00011-of-00013.safetensors",
830
  "model.layers.31.self_attn.k_proj.scales": "model-00011-of-00013.safetensors",
 
844
  "model.layers.31.self_attn.v_proj.weight": "model-00011-of-00013.safetensors",
845
  "model.layers.32.input_layernorm.weight": "model-00012-of-00013.safetensors",
846
  "model.layers.32.mlp.experts.down_proj.bias": "model-00012-of-00013.safetensors",
 
847
  "model.layers.32.mlp.experts.down_proj.scales": "model-00012-of-00013.safetensors",
848
  "model.layers.32.mlp.experts.down_proj.weight": "model-00012-of-00013.safetensors",
849
+ "model.layers.32.mlp.experts.gate_proj.bias": "model-00011-of-00013.safetensors",
850
+ "model.layers.32.mlp.experts.gate_proj.scales": "model-00011-of-00013.safetensors",
851
+ "model.layers.32.mlp.experts.gate_proj.weight": "model-00011-of-00013.safetensors",
852
+ "model.layers.32.mlp.experts.up_proj.bias": "model-00011-of-00013.safetensors",
853
+ "model.layers.32.mlp.experts.up_proj.scales": "model-00011-of-00013.safetensors",
854
+ "model.layers.32.mlp.experts.up_proj.weight": "model-00011-of-00013.safetensors",
 
 
855
  "model.layers.32.mlp.router.bias": "model-00012-of-00013.safetensors",
856
  "model.layers.32.mlp.router.biases": "model-00012-of-00013.safetensors",
857
  "model.layers.32.mlp.router.scales": "model-00012-of-00013.safetensors",
858
  "model.layers.32.mlp.router.weight": "model-00012-of-00013.safetensors",
859
  "model.layers.32.post_attention_layernorm.weight": "model-00012-of-00013.safetensors",
860
+ "model.layers.32.self_attn.k_proj.bias": "model-00011-of-00013.safetensors",
861
+ "model.layers.32.self_attn.k_proj.biases": "model-00011-of-00013.safetensors",
862
+ "model.layers.32.self_attn.k_proj.scales": "model-00011-of-00013.safetensors",
863
+ "model.layers.32.self_attn.k_proj.weight": "model-00011-of-00013.safetensors",
864
+ "model.layers.32.self_attn.o_proj.bias": "model-00011-of-00013.safetensors",
865
+ "model.layers.32.self_attn.o_proj.biases": "model-00011-of-00013.safetensors",
866
+ "model.layers.32.self_attn.o_proj.scales": "model-00011-of-00013.safetensors",
867
+ "model.layers.32.self_attn.o_proj.weight": "model-00011-of-00013.safetensors",
868
+ "model.layers.32.self_attn.q_proj.bias": "model-00011-of-00013.safetensors",
869
+ "model.layers.32.self_attn.q_proj.biases": "model-00011-of-00013.safetensors",
870
+ "model.layers.32.self_attn.q_proj.scales": "model-00011-of-00013.safetensors",
871
+ "model.layers.32.self_attn.q_proj.weight": "model-00011-of-00013.safetensors",
872
+ "model.layers.32.self_attn.sinks": "model-00011-of-00013.safetensors",
873
+ "model.layers.32.self_attn.v_proj.bias": "model-00011-of-00013.safetensors",
874
+ "model.layers.32.self_attn.v_proj.biases": "model-00011-of-00013.safetensors",
875
+ "model.layers.32.self_attn.v_proj.scales": "model-00011-of-00013.safetensors",
876
+ "model.layers.32.self_attn.v_proj.weight": "model-00011-of-00013.safetensors",
877
+ "model.layers.33.input_layernorm.weight": "model-00012-of-00013.safetensors",
878
+ "model.layers.33.mlp.experts.down_proj.bias": "model-00012-of-00013.safetensors",
879
+ "model.layers.33.mlp.experts.down_proj.scales": "model-00012-of-00013.safetensors",
 
880
  "model.layers.33.mlp.experts.down_proj.weight": "model-00012-of-00013.safetensors",
881
  "model.layers.33.mlp.experts.gate_proj.bias": "model-00012-of-00013.safetensors",
 
882
  "model.layers.33.mlp.experts.gate_proj.scales": "model-00012-of-00013.safetensors",
883
  "model.layers.33.mlp.experts.gate_proj.weight": "model-00012-of-00013.safetensors",
884
  "model.layers.33.mlp.experts.up_proj.bias": "model-00012-of-00013.safetensors",
 
885
  "model.layers.33.mlp.experts.up_proj.scales": "model-00012-of-00013.safetensors",
886
  "model.layers.33.mlp.experts.up_proj.weight": "model-00012-of-00013.safetensors",
887
+ "model.layers.33.mlp.router.bias": "model-00012-of-00013.safetensors",
888
+ "model.layers.33.mlp.router.biases": "model-00012-of-00013.safetensors",
889
+ "model.layers.33.mlp.router.scales": "model-00012-of-00013.safetensors",
890
+ "model.layers.33.mlp.router.weight": "model-00012-of-00013.safetensors",
891
+ "model.layers.33.post_attention_layernorm.weight": "model-00012-of-00013.safetensors",
892
  "model.layers.33.self_attn.k_proj.bias": "model-00012-of-00013.safetensors",
893
  "model.layers.33.self_attn.k_proj.biases": "model-00012-of-00013.safetensors",
894
  "model.layers.33.self_attn.k_proj.scales": "model-00012-of-00013.safetensors",
 
906
  "model.layers.33.self_attn.v_proj.biases": "model-00012-of-00013.safetensors",
907
  "model.layers.33.self_attn.v_proj.scales": "model-00012-of-00013.safetensors",
908
  "model.layers.33.self_attn.v_proj.weight": "model-00012-of-00013.safetensors",
909
+ "model.layers.34.input_layernorm.weight": "model-00012-of-00013.safetensors",
910
+ "model.layers.34.mlp.experts.down_proj.bias": "model-00012-of-00013.safetensors",
911
+ "model.layers.34.mlp.experts.down_proj.scales": "model-00012-of-00013.safetensors",
912
+ "model.layers.34.mlp.experts.down_proj.weight": "model-00012-of-00013.safetensors",
913
+ "model.layers.34.mlp.experts.gate_proj.bias": "model-00012-of-00013.safetensors",
914
+ "model.layers.34.mlp.experts.gate_proj.scales": "model-00012-of-00013.safetensors",
915
+ "model.layers.34.mlp.experts.gate_proj.weight": "model-00012-of-00013.safetensors",
916
+ "model.layers.34.mlp.experts.up_proj.bias": "model-00012-of-00013.safetensors",
917
+ "model.layers.34.mlp.experts.up_proj.scales": "model-00012-of-00013.safetensors",
918
+ "model.layers.34.mlp.experts.up_proj.weight": "model-00012-of-00013.safetensors",
919
+ "model.layers.34.mlp.router.bias": "model-00012-of-00013.safetensors",
920
+ "model.layers.34.mlp.router.biases": "model-00012-of-00013.safetensors",
921
+ "model.layers.34.mlp.router.scales": "model-00012-of-00013.safetensors",
922
+ "model.layers.34.mlp.router.weight": "model-00012-of-00013.safetensors",
923
+ "model.layers.34.post_attention_layernorm.weight": "model-00012-of-00013.safetensors",
924
+ "model.layers.34.self_attn.k_proj.bias": "model-00012-of-00013.safetensors",
925
+ "model.layers.34.self_attn.k_proj.biases": "model-00012-of-00013.safetensors",
926
+ "model.layers.34.self_attn.k_proj.scales": "model-00012-of-00013.safetensors",
927
+ "model.layers.34.self_attn.k_proj.weight": "model-00012-of-00013.safetensors",
928
+ "model.layers.34.self_attn.o_proj.bias": "model-00012-of-00013.safetensors",
929
+ "model.layers.34.self_attn.o_proj.biases": "model-00012-of-00013.safetensors",
930
+ "model.layers.34.self_attn.o_proj.scales": "model-00012-of-00013.safetensors",
931
+ "model.layers.34.self_attn.o_proj.weight": "model-00012-of-00013.safetensors",
932
+ "model.layers.34.self_attn.q_proj.bias": "model-00012-of-00013.safetensors",
933
+ "model.layers.34.self_attn.q_proj.biases": "model-00012-of-00013.safetensors",
934
+ "model.layers.34.self_attn.q_proj.scales": "model-00012-of-00013.safetensors",
935
+ "model.layers.34.self_attn.q_proj.weight": "model-00012-of-00013.safetensors",
936
+ "model.layers.34.self_attn.sinks": "model-00012-of-00013.safetensors",
937
+ "model.layers.34.self_attn.v_proj.bias": "model-00012-of-00013.safetensors",
938
+ "model.layers.34.self_attn.v_proj.biases": "model-00012-of-00013.safetensors",
939
+ "model.layers.34.self_attn.v_proj.scales": "model-00012-of-00013.safetensors",
940
+ "model.layers.34.self_attn.v_proj.weight": "model-00012-of-00013.safetensors",
 
 
 
941
  "model.layers.35.input_layernorm.weight": "model-00013-of-00013.safetensors",
942
  "model.layers.35.mlp.experts.down_proj.bias": "model-00013-of-00013.safetensors",
 
943
  "model.layers.35.mlp.experts.down_proj.scales": "model-00013-of-00013.safetensors",
944
  "model.layers.35.mlp.experts.down_proj.weight": "model-00013-of-00013.safetensors",
945
+ "model.layers.35.mlp.experts.gate_proj.bias": "model-00012-of-00013.safetensors",
946
+ "model.layers.35.mlp.experts.gate_proj.scales": "model-00012-of-00013.safetensors",
947
+ "model.layers.35.mlp.experts.gate_proj.weight": "model-00012-of-00013.safetensors",
948
+ "model.layers.35.mlp.experts.up_proj.bias": "model-00012-of-00013.safetensors",
949
+ "model.layers.35.mlp.experts.up_proj.scales": "model-00012-of-00013.safetensors",
950
+ "model.layers.35.mlp.experts.up_proj.weight": "model-00012-of-00013.safetensors",
 
 
951
  "model.layers.35.mlp.router.bias": "model-00013-of-00013.safetensors",
952
  "model.layers.35.mlp.router.biases": "model-00013-of-00013.safetensors",
953
  "model.layers.35.mlp.router.scales": "model-00013-of-00013.safetensors",
954
  "model.layers.35.mlp.router.weight": "model-00013-of-00013.safetensors",
955
  "model.layers.35.post_attention_layernorm.weight": "model-00013-of-00013.safetensors",
956
+ "model.layers.35.self_attn.k_proj.bias": "model-00012-of-00013.safetensors",
957
+ "model.layers.35.self_attn.k_proj.biases": "model-00012-of-00013.safetensors",
958
+ "model.layers.35.self_attn.k_proj.scales": "model-00012-of-00013.safetensors",
959
+ "model.layers.35.self_attn.k_proj.weight": "model-00012-of-00013.safetensors",
960
+ "model.layers.35.self_attn.o_proj.bias": "model-00012-of-00013.safetensors",
961
+ "model.layers.35.self_attn.o_proj.biases": "model-00012-of-00013.safetensors",
962
+ "model.layers.35.self_attn.o_proj.scales": "model-00012-of-00013.safetensors",
963
+ "model.layers.35.self_attn.o_proj.weight": "model-00012-of-00013.safetensors",
964
+ "model.layers.35.self_attn.q_proj.bias": "model-00012-of-00013.safetensors",
965
+ "model.layers.35.self_attn.q_proj.biases": "model-00012-of-00013.safetensors",
966
+ "model.layers.35.self_attn.q_proj.scales": "model-00012-of-00013.safetensors",
967
+ "model.layers.35.self_attn.q_proj.weight": "model-00012-of-00013.safetensors",
968
+ "model.layers.35.self_attn.sinks": "model-00012-of-00013.safetensors",
969
+ "model.layers.35.self_attn.v_proj.bias": "model-00012-of-00013.safetensors",
970
+ "model.layers.35.self_attn.v_proj.biases": "model-00012-of-00013.safetensors",
971
+ "model.layers.35.self_attn.v_proj.scales": "model-00012-of-00013.safetensors",
972
+ "model.layers.35.self_attn.v_proj.weight": "model-00012-of-00013.safetensors",
973
  "model.layers.4.input_layernorm.weight": "model-00002-of-00013.safetensors",
974
  "model.layers.4.mlp.experts.down_proj.bias": "model-00002-of-00013.safetensors",
 
975
  "model.layers.4.mlp.experts.down_proj.scales": "model-00002-of-00013.safetensors",
976
  "model.layers.4.mlp.experts.down_proj.weight": "model-00002-of-00013.safetensors",
977
  "model.layers.4.mlp.experts.gate_proj.bias": "model-00002-of-00013.safetensors",
 
978
  "model.layers.4.mlp.experts.gate_proj.scales": "model-00002-of-00013.safetensors",
979
  "model.layers.4.mlp.experts.gate_proj.weight": "model-00002-of-00013.safetensors",
980
  "model.layers.4.mlp.experts.up_proj.bias": "model-00002-of-00013.safetensors",
 
981
  "model.layers.4.mlp.experts.up_proj.scales": "model-00002-of-00013.safetensors",
982
  "model.layers.4.mlp.experts.up_proj.weight": "model-00002-of-00013.safetensors",
983
  "model.layers.4.mlp.router.bias": "model-00002-of-00013.safetensors",
 
1004
  "model.layers.4.self_attn.v_proj.weight": "model-00002-of-00013.safetensors",
1005
  "model.layers.5.input_layernorm.weight": "model-00003-of-00013.safetensors",
1006
  "model.layers.5.mlp.experts.down_proj.bias": "model-00003-of-00013.safetensors",
 
1007
  "model.layers.5.mlp.experts.down_proj.scales": "model-00003-of-00013.safetensors",
1008
  "model.layers.5.mlp.experts.down_proj.weight": "model-00003-of-00013.safetensors",
1009
  "model.layers.5.mlp.experts.gate_proj.bias": "model-00002-of-00013.safetensors",
 
1010
  "model.layers.5.mlp.experts.gate_proj.scales": "model-00002-of-00013.safetensors",
1011
  "model.layers.5.mlp.experts.gate_proj.weight": "model-00002-of-00013.safetensors",
1012
+ "model.layers.5.mlp.experts.up_proj.bias": "model-00002-of-00013.safetensors",
1013
+ "model.layers.5.mlp.experts.up_proj.scales": "model-00002-of-00013.safetensors",
 
1014
  "model.layers.5.mlp.experts.up_proj.weight": "model-00002-of-00013.safetensors",
1015
  "model.layers.5.mlp.router.bias": "model-00003-of-00013.safetensors",
1016
  "model.layers.5.mlp.router.biases": "model-00003-of-00013.safetensors",
 
1036
  "model.layers.5.self_attn.v_proj.weight": "model-00002-of-00013.safetensors",
1037
  "model.layers.6.input_layernorm.weight": "model-00003-of-00013.safetensors",
1038
  "model.layers.6.mlp.experts.down_proj.bias": "model-00003-of-00013.safetensors",
 
1039
  "model.layers.6.mlp.experts.down_proj.scales": "model-00003-of-00013.safetensors",
1040
  "model.layers.6.mlp.experts.down_proj.weight": "model-00003-of-00013.safetensors",
1041
  "model.layers.6.mlp.experts.gate_proj.bias": "model-00003-of-00013.safetensors",
 
1042
  "model.layers.6.mlp.experts.gate_proj.scales": "model-00003-of-00013.safetensors",
1043
  "model.layers.6.mlp.experts.gate_proj.weight": "model-00003-of-00013.safetensors",
1044
  "model.layers.6.mlp.experts.up_proj.bias": "model-00003-of-00013.safetensors",
 
1045
  "model.layers.6.mlp.experts.up_proj.scales": "model-00003-of-00013.safetensors",
1046
  "model.layers.6.mlp.experts.up_proj.weight": "model-00003-of-00013.safetensors",
1047
  "model.layers.6.mlp.router.bias": "model-00003-of-00013.safetensors",
 
1068
  "model.layers.6.self_attn.v_proj.weight": "model-00003-of-00013.safetensors",
1069
  "model.layers.7.input_layernorm.weight": "model-00003-of-00013.safetensors",
1070
  "model.layers.7.mlp.experts.down_proj.bias": "model-00003-of-00013.safetensors",
 
1071
  "model.layers.7.mlp.experts.down_proj.scales": "model-00003-of-00013.safetensors",
1072
  "model.layers.7.mlp.experts.down_proj.weight": "model-00003-of-00013.safetensors",
1073
  "model.layers.7.mlp.experts.gate_proj.bias": "model-00003-of-00013.safetensors",
 
1074
  "model.layers.7.mlp.experts.gate_proj.scales": "model-00003-of-00013.safetensors",
1075
  "model.layers.7.mlp.experts.gate_proj.weight": "model-00003-of-00013.safetensors",
1076
  "model.layers.7.mlp.experts.up_proj.bias": "model-00003-of-00013.safetensors",
 
1077
  "model.layers.7.mlp.experts.up_proj.scales": "model-00003-of-00013.safetensors",
1078
  "model.layers.7.mlp.experts.up_proj.weight": "model-00003-of-00013.safetensors",
1079
  "model.layers.7.mlp.router.bias": "model-00003-of-00013.safetensors",
 
1100
  "model.layers.7.self_attn.v_proj.weight": "model-00003-of-00013.safetensors",
1101
  "model.layers.8.input_layernorm.weight": "model-00004-of-00013.safetensors",
1102
  "model.layers.8.mlp.experts.down_proj.bias": "model-00004-of-00013.safetensors",
 
1103
  "model.layers.8.mlp.experts.down_proj.scales": "model-00004-of-00013.safetensors",
1104
  "model.layers.8.mlp.experts.down_proj.weight": "model-00004-of-00013.safetensors",
1105
  "model.layers.8.mlp.experts.gate_proj.bias": "model-00003-of-00013.safetensors",
 
1106
  "model.layers.8.mlp.experts.gate_proj.scales": "model-00003-of-00013.safetensors",
1107
  "model.layers.8.mlp.experts.gate_proj.weight": "model-00003-of-00013.safetensors",
1108
+ "model.layers.8.mlp.experts.up_proj.bias": "model-00003-of-00013.safetensors",
1109
+ "model.layers.8.mlp.experts.up_proj.scales": "model-00003-of-00013.safetensors",
1110
+ "model.layers.8.mlp.experts.up_proj.weight": "model-00003-of-00013.safetensors",
 
1111
  "model.layers.8.mlp.router.bias": "model-00004-of-00013.safetensors",
1112
  "model.layers.8.mlp.router.biases": "model-00004-of-00013.safetensors",
1113
  "model.layers.8.mlp.router.scales": "model-00004-of-00013.safetensors",
 
1132
  "model.layers.8.self_attn.v_proj.weight": "model-00003-of-00013.safetensors",
1133
  "model.layers.9.input_layernorm.weight": "model-00004-of-00013.safetensors",
1134
  "model.layers.9.mlp.experts.down_proj.bias": "model-00004-of-00013.safetensors",
 
1135
  "model.layers.9.mlp.experts.down_proj.scales": "model-00004-of-00013.safetensors",
1136
  "model.layers.9.mlp.experts.down_proj.weight": "model-00004-of-00013.safetensors",
1137
  "model.layers.9.mlp.experts.gate_proj.bias": "model-00004-of-00013.safetensors",
 
1138
  "model.layers.9.mlp.experts.gate_proj.scales": "model-00004-of-00013.safetensors",
1139
  "model.layers.9.mlp.experts.gate_proj.weight": "model-00004-of-00013.safetensors",
1140
  "model.layers.9.mlp.experts.up_proj.bias": "model-00004-of-00013.safetensors",
 
1141
  "model.layers.9.mlp.experts.up_proj.scales": "model-00004-of-00013.safetensors",
1142
  "model.layers.9.mlp.experts.up_proj.weight": "model-00004-of-00013.safetensors",
1143
  "model.layers.9.mlp.router.bias": "model-00004-of-00013.safetensors",