nbeerbower committed on
Commit 66c6158 · verified · 1 Parent(s): bdc9368

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,275 @@
+ ---
+ base_model:
+ - nbeerbower/mistral-nemo-kartoffel-12B
+ library_name: transformers
+ tags:
+ - mergekit
+ - merge
+
+ ---
+ # GigaPotato-Mistral-Nemo-24B
+
+ This is a merge of pre-trained language models created using [mergekit](https://github.com/cg123/mergekit).
+
+ ## Merge Details
+ ### Merge Method
+
+ This model was merged using the Passthrough merge method.
+
+ ### Models Merged
+
+ The following models were included in the merge:
+ * [nbeerbower/mistral-nemo-kartoffel-12B](https://huggingface.co/nbeerbower/mistral-nemo-kartoffel-12B)
+
+ ### Configuration
+
+ The following YAML configuration was used to produce this model:
+
+ ```yaml
+ slices:
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [0, 1]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [0, 1]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [1, 2]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [1, 2]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [2, 3]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [2, 3]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [3, 4]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [3, 4]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [4, 5]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [4, 5]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [5, 6]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [5, 6]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [6, 7]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [6, 7]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [7, 8]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [7, 8]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [8, 9]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [8, 9]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [9, 10]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [9, 10]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [10, 11]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [10, 11]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [11, 12]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [11, 12]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [12, 13]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [12, 13]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [13, 14]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [13, 14]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [14, 15]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [14, 15]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [15, 16]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [15, 16]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [16, 17]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [16, 17]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [17, 18]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [17, 18]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [18, 19]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [18, 19]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [19, 20]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [19, 20]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [20, 21]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [20, 21]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [21, 22]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [21, 22]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [22, 23]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [22, 23]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [23, 24]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [23, 24]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [24, 25]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [24, 25]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [25, 26]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [25, 26]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [26, 27]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [26, 27]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [27, 28]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [27, 28]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [28, 29]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [28, 29]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [29, 30]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [29, 30]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [30, 31]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [30, 31]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [31, 32]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [31, 32]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [32, 33]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [32, 33]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [33, 34]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [33, 34]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [34, 35]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [34, 35]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [35, 36]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [35, 36]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [36, 37]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [36, 37]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [37, 38]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [37, 38]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [38, 39]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [38, 39]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [39, 40]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [39, 40]
+
+ merge_method: passthrough
+ dtype: bfloat16
+
+
+ ```
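For reproducibility, a minimal sketch (not part of this commit) of re-running the merge from Python. It assumes mergekit is installed (`pip install mergekit`), that a local copy of the `mergekit_config.yml` added below sits in the working directory, and that `--cuda` is only passed when a GPU is available; the output directory name is illustrative.

```python
# Sketch: re-run the passthrough merge by pointing mergekit's config-driven
# CLI at the mergekit_config.yml stored in this repository.
import subprocess

subprocess.run(
    [
        "mergekit-yaml",                  # mergekit's YAML-driven entry point
        "mergekit_config.yml",            # the configuration shown above / added below
        "./GigaPotato-Mistral-Nemo-24B",  # illustrative output directory
        "--cuda",                         # optional; drop if no GPU is available
    ],
    check=True,
)
```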
config.json ADDED
@@ -0,0 +1,27 @@
+ {
+ "_name_or_path": "nbeerbower/mistral-nemo-kartoffel-12B",
+ "architectures": [
+ "MistralForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 4,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 14336,
+ "max_position_embeddings": 1024000,
+ "model_type": "mistral",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 80,
+ "num_key_value_heads": 8,
+ "rms_norm_eps": 1e-05,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.48.3",
+ "use_cache": false,
+ "vocab_size": 131072
+ }
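For reference, a minimal loading sketch (not part of this commit), assuming the published repository id `nbeerbower/GigaPotato-Mistral-Nemo-24B`, a recent transformers release, and accelerate installed for `device_map="auto"`; the bfloat16 dtype mirrors `"torch_dtype": "bfloat16"` in the config above.

```python
# Sketch: load the merged 80-layer model with Hugging Face transformers.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "nbeerbower/GigaPotato-Mistral-Nemo-24B"  # assumed from the README title

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in config.json
    device_map="auto",           # requires accelerate; shards across available devices
)

prompt = "Write one sentence about potatoes."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=50)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```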
mergekit_config.yml ADDED
@@ -0,0 +1,245 @@
+ slices:
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [0, 1]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [0, 1]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [1, 2]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [1, 2]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [2, 3]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [2, 3]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [3, 4]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [3, 4]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [4, 5]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [4, 5]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [5, 6]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [5, 6]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [6, 7]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [6, 7]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [7, 8]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [7, 8]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [8, 9]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [8, 9]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [9, 10]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [9, 10]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [10, 11]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [10, 11]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [11, 12]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [11, 12]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [12, 13]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [12, 13]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [13, 14]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [13, 14]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [14, 15]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [14, 15]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [15, 16]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [15, 16]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [16, 17]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [16, 17]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [17, 18]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [17, 18]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [18, 19]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [18, 19]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [19, 20]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [19, 20]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [20, 21]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [20, 21]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [21, 22]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [21, 22]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [22, 23]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [22, 23]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [23, 24]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [23, 24]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [24, 25]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [24, 25]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [25, 26]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [25, 26]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [26, 27]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [26, 27]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [27, 28]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [27, 28]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [28, 29]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [28, 29]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [29, 30]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [29, 30]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [30, 31]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [30, 31]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [31, 32]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [31, 32]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [32, 33]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [32, 33]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [33, 34]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [33, 34]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [34, 35]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [34, 35]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [35, 36]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [35, 36]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [36, 37]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [36, 37]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [37, 38]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [37, 38]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [38, 39]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [38, 39]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [39, 40]
+ - sources:
+ - model: nbeerbower/mistral-nemo-kartoffel-12B
+ layer_range: [39, 40]
+
+ merge_method: passthrough
+ dtype: bfloat16
+
model-00001-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f7e9d5115b3b2f428b26f5f75f4a0c03a1ec0deff7168013ef6b0e350b5bb3cb
+ size 4865499664
model-00002-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d57f3aaf6962d6785a765f59fb2e60c05e55652b4a79d7006882376ea501dca2
+ size 4949451440
model-00003-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9f8c8c77833f17953905a3b94a1be8d9a33c080ee21bdc8d46d4769c6b020c98
+ size 4865607480
model-00004-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef2cf50a42729584734c4537d0f61574d2bee4afeeb36e14c24d7b54cb0ebd87
+ size 4949451424
model-00005-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0e1581b6ef1abeaf39a611f8201e73feec86773420b5eaf363b9f74f45bd2011
+ size 4865607480
model-00006-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:486f9d83f5d227673f9a9fd7557fbf3746d3505158769810804adfe4811930e2
+ size 4949451424
model-00007-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9bd0e34cbd29b6bb133c5d8cde5abe8ec6b008d3560bf8651cffdd95c5f9cdb8
+ size 4865607480
model-00008-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:091ac37532dd2dd7df06f864e2b749110739581f4ec078f53316414987c9e66c
+ size 4949451440
model-00009-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5306858f53eed40b9803f0c8dbd9215e164c153296cda1608e1395434d6aaa6c
+ size 4865607464
model-00010-of-00010.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e38f09ec13c576a751b5aedfd1c940e8d77919605e2a469f41a2f2b2fe439189
+ size 2181113840
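As a rough sanity check (not part of the commit), the ten shard sizes listed above can be summed and converted to an approximate parameter count, assuming every tensor is stored as bfloat16 (2 bytes per parameter) and ignoring safetensors header overhead:

```python
# Sum the LFS shard sizes from the entries above; at 2 bytes per bfloat16
# parameter this comes to roughly 46.3 GB, i.e. about 23B parameters for the
# 80-layer passthrough merge, consistent with the "24B" name.
shard_sizes = [
    4865499664, 4949451440, 4865607480, 4949451424, 4865607480,
    4949451424, 4865607480, 4949451440, 4865607464, 2181113840,
]
total_bytes = sum(shard_sizes)
print(f"total: {total_bytes / 1e9:.1f} GB, ~{total_bytes / 2 / 1e9:.1f}B parameters")
```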
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"mergekit_version": "0.1.0"}, "weight_map": {"lm_head.weight": "model-00001-of-00010.safetensors", "model.embed_tokens.weight": "model-00001-of-00010.safetensors", "model.layers.1.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.3.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.2.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.21.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.20.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.20.mlp.up_proj.weight": 
"model-00002-of-00010.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.23.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.22.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.25.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.24.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.25.self_attn.v_proj.weight": 
"model-00002-of-00010.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.27.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.26.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.26.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.29.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.28.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.29.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.31.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.30.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00003-of-00010.safetensors", 
"model.layers.31.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.33.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.32.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.33.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.35.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.34.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", 
"model.layers.34.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.37.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.36.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.36.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.39.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.38.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.39.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.5.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.4.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00004-of-00010.safetensors", 
"model.layers.5.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.41.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.40.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.43.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.42.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", 
"model.layers.42.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.45.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.44.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.47.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.46.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.46.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.49.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.48.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00005-of-00010.safetensors", 
"model.layers.49.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.49.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.49.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.51.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.50.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.51.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.53.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.52.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.53.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", 
"model.layers.52.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.55.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.54.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.54.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.57.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.56.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.57.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.56.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.57.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.56.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.57.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.56.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.57.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.56.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.57.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.56.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.57.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.56.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.57.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.56.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.57.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.56.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.59.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.58.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.59.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.58.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.59.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.58.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.59.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.58.mlp.up_proj.weight": "model-00006-of-00010.safetensors", 
"model.layers.59.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.58.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.59.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.58.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.59.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.58.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.59.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.58.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.59.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.58.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.7.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.6.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.61.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.60.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.61.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.60.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.61.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.60.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.61.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.60.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.61.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.60.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.61.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.60.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.61.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.60.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.61.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.60.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.61.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", 
"model.layers.60.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.63.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.62.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.63.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.62.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.63.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.62.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.63.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.62.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.63.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.62.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.63.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.62.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.63.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.62.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.63.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.62.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.63.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.62.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.65.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.64.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.65.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.64.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.65.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.64.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.65.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.64.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.65.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.64.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.65.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.64.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.65.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.64.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.65.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.64.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.65.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.64.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.67.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.66.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.67.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.66.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.67.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.66.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.67.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.66.mlp.up_proj.weight": "model-00007-of-00010.safetensors", 
"model.layers.67.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.66.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.67.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.66.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.67.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.66.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.67.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.66.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.67.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.66.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.69.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.68.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.69.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.68.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.69.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.68.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.69.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.68.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.69.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.68.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.69.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.68.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.69.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.68.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.69.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.68.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.69.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.68.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.71.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.70.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.71.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.70.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.71.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.70.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.71.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.70.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.71.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.70.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.71.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.70.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.71.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.70.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.71.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.70.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.71.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", 
"model.layers.70.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.73.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.72.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.73.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.72.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.73.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.72.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.73.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.72.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.73.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.72.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.73.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.72.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.73.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.72.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.73.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.72.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.73.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.72.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.75.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.74.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.75.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.74.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.75.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.74.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.75.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.74.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.75.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.74.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.75.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.74.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.75.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.74.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.75.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.74.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.75.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.74.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.77.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.76.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.77.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.76.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.77.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.76.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.77.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.76.mlp.up_proj.weight": "model-00008-of-00010.safetensors", 
"model.layers.77.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.76.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.77.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.76.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.77.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.76.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.77.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.76.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.77.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.76.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.79.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.78.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.79.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.78.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.79.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.78.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.79.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.78.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.79.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.78.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.79.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.78.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.79.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.78.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.79.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.78.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.79.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.78.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.8.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", 
"model.layers.8.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.11.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.10.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.13.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.12.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.13.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.15.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.14.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00009-of-00010.safetensors", 
"model.layers.15.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.17.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.16.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00010-of-00010.safetensors", "model.layers.16.mlp.down_proj.weight": "model-00010-of-00010.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00010-of-00010.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00010-of-00010.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00010-of-00010.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00010-of-00010.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00010-of-00010.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00010-of-00010.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00010-of-00010.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00010-of-00010.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00010-of-00010.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00010-of-00010.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00010-of-00010.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00010-of-00010.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00010-of-00010.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00010-of-00010.safetensors", "model.layers.19.input_layernorm.weight": "model-00010-of-00010.safetensors", "model.layers.18.input_layernorm.weight": "model-00010-of-00010.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00010-of-00010.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00010-of-00010.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00010-of-00010.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00010-of-00010.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00010-of-00010.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00010-of-00010.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00010-of-00010.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00010-of-00010.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00010-of-00010.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00010-of-00010.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00010-of-00010.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00010-of-00010.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00010-of-00010.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00010-of-00010.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00010-of-00010.safetensors", 
"model.layers.18.self_attn.v_proj.weight": "model-00010-of-00010.safetensors", "model.norm.weight": "model-00010-of-00010.safetensors"}}
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+ {
+   "bos_token": {
+     "content": "<s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<|im_end|>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e3129bdf0fb0ea7f9a0a5d1219e6fafdd58741cb66308127d07337ac9fd71bf
+ size 17078311
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff