sleepdeprived3 committed
Commit 534c0c9 · verified · 1 Parent(s): cdd5f24

Upload 12 files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
LICENSE ADDED
@@ -0,0 +1 @@
1
+ Refer to the original model for licensing information.
README.md CHANGED
@@ -1,3 +1,515 @@
1
- ---
2
- license: apache-2.0
3
- ---
1
+ ---
2
+ license: apache-2.0
3
+ language:
4
+ - en
5
+ base_model:
6
+ - mistralai/Mistral-Nemo-Instruct-2407
7
+ base_model_relation: finetune
8
+ pipeline_tag: text-generation
9
+ tags:
10
+ - Reformed Baptist
11
+ - 1689 Confession
12
+ - Calvinism
13
+ - Covenant Theology
14
+ - Bible
15
+ - Theology
16
+ - Protestant
17
+ ---
18
+ <style>
19
+ body {
20
+ font-family: 'Quicksand', sans-serif;
21
+ background: linear-gradient(135deg, #0a1a1a 0%, #001010 100%);
22
+ color: #e1ffff !important;
23
+ text-shadow: 0 0 3px rgba(0, 0, 0, 0.7);
24
+ margin: 0;
25
+ padding: 20px;
26
+ transition: all 0.5s ease;
27
+ }
28
+
29
+ @media (prefers-color-scheme: light) {
30
+ body {
31
+ background: linear-gradient(135deg, #e1ffff 0%, #c0f0ff 100%);
32
+ color: #002b36 !important;
33
+ text-shadow: 0 0 3px rgba(255, 255, 255, 0.7);
34
+ }
35
+ }
36
+
37
+ .container {
38
+ min-width: 100%;
39
+ margin: 0 auto;
40
+ max-width: 1200px;
41
+ background: rgba(0, 17, 22, 0.95);
42
+ border-radius: 12px;
43
+ padding: 30px;
44
+ box-shadow: 0 0 20px rgba(0, 255, 255, 0.1);
45
+ border: 1px solid rgba(0, 255, 255, 0.2);
46
+ position: relative;
47
+ overflow: hidden;
48
+ }
49
+
50
+ .container::before {
51
+ content: '';
52
+ position: absolute;
53
+ top: -1px;
54
+ left: -1px;
55
+ right: -1px;
56
+ bottom: -1px;
57
+ border: 1px solid rgba(0, 255, 255, 0.5);
58
+ border-radius: 12px;
59
+ pointer-events: none;
60
+ animation: borderGlow 3s ease-in-out infinite alternate;
61
+ }
62
+
63
+ @keyframes borderGlow {
64
+ 0% {
65
+ box-shadow: 0 0 5px rgba(0, 255, 255, 0.3);
66
+ border-color: rgba(0, 255, 255, 0.5);
67
+ }
68
+ 50% {
69
+ box-shadow: 0 0 15px rgba(0, 100, 255, 0.3);
70
+ border-color: rgba(0, 100, 255, 0.5);
71
+ }
72
+ 100% {
73
+ box-shadow: 0 0 5px rgba(0, 255, 255, 0.3);
74
+ border-color: rgba(0, 255, 255, 0.5);
75
+ }
76
+ }
77
+
78
+ .header {
79
+ text-align: center;
80
+ margin-bottom: 30px;
81
+ position: relative;
82
+ }
83
+
84
+ .header::after {
85
+ content: '';
86
+ position: absolute;
87
+ bottom: -15px;
88
+ left: 25%;
89
+ right: 25%;
90
+ height: 1px;
91
+ background: linear-gradient(90deg, transparent, rgba(0, 255, 255, 0.5), transparent);
92
+ animation: scanline 8s linear infinite;
93
+ }
94
+
95
+ @keyframes scanline {
96
+ 0% { background-position: -100% 0; }
97
+ 100% { background-position: 200% 0; }
98
+ }
99
+
100
+ .model-name {
101
+ color: #00ffff;
102
+ font-size: 2.5em;
103
+ text-shadow: 0 0 15px rgba(0, 255, 255, 0.5);
104
+ margin: 0;
105
+ letter-spacing: -1px;
106
+ animation: textGlow 4s ease-in-out infinite alternate;
107
+ }
108
+
109
+ @keyframes textGlow {
110
+ 0% { text-shadow: 0 0 15px rgba(0, 255, 255, 0.5); }
111
+ 50% { text-shadow: 0 0 20px rgba(0, 100, 255, 0.5); }
112
+ 100% { text-shadow: 0 0 15px rgba(0, 255, 255, 0.5); }
113
+ }
114
+
115
+ .subtitle {
116
+ color: #00ffcc;
117
+ font-size: 1.2em;
118
+ margin-top: 10px;
119
+ animation: subtitleFade 6s ease-in-out infinite;
120
+ }
121
+
122
+ @keyframes subtitleFade {
123
+ 0%, 100% { opacity: 0.8; }
124
+ 50% { opacity: 1; }
125
+ }
126
+
127
+ .bible-container {
128
+ margin: 20px -30px;
129
+ width: calc(100% + 60px);
130
+ overflow: hidden;
131
+ border-radius: 8px;
132
+ border: 1px solid rgba(0, 255, 255, 0.3);
133
+ position: relative;
134
+ }
135
+
136
+ .bible-container::before {
137
+ content: '';
138
+ position: absolute;
139
+ top: 0;
140
+ left: 0;
141
+ right: 0;
142
+ bottom: 0;
143
+ background: linear-gradient(45deg,
144
+ rgba(0, 255, 255, 0.1) 0%,
145
+ transparent 20%,
146
+ transparent 80%,
147
+ rgba(0, 100, 255, 0.1) 100%);
148
+ pointer-events: none;
149
+ animation: gradientSlide 10s linear infinite;
150
+ }
151
+
152
+ @keyframes gradientSlide {
153
+ 0% { background-position: 0% 0%; }
154
+ 100% { background-position: 100% 100%; }
155
+ }
156
+
157
+ .bible-img {
158
+ width: 100%;
159
+ height: auto;
160
+ border-radius: 0;
161
+ border: none;
162
+ box-shadow: 0 0 40px rgba(0, 255, 255, 0.2);
163
+ transition: transform 0.5s ease;
164
+ }
165
+
166
+ .bible-img:hover {
167
+ transform: scale(1.01);
168
+ }
169
+
170
+ .section {
171
+ color: #e1ffff;
172
+ margin: 25px 0;
173
+ padding: 20px;
174
+ background: rgba(5, 25, 35, 0.9);
175
+ border-radius: 8px;
176
+ border: 1px solid rgba(0, 255, 255, 0.15);
177
+ position: relative;
178
+ transition: all 0.3s ease;
179
+ }
180
+
181
+ .section:hover {
182
+ border-color: rgba(0, 100, 255, 0.3);
183
+ box-shadow: 0 0 15px rgba(0, 255, 255, 0.1);
184
+ }
185
+
186
+ .section::before {
187
+ content: '';
188
+ position: absolute;
189
+ top: -1px;
190
+ left: -1px;
191
+ right: -1px;
192
+ bottom: -1px;
193
+ border: 1px solid rgba(0, 255, 255, 0.3);
194
+ border-radius: 8px;
195
+ pointer-events: none;
196
+ animation: sectionPulse 5s ease-in-out infinite;
197
+ }
198
+
199
+ @keyframes sectionPulse {
200
+ 0%, 100% { opacity: 0.7; }
201
+ 50% { opacity: 0.3; }
202
+ }
203
+
204
+ .section-title {
205
+ color: #00ffff;
206
+ font-size: 1.8em;
207
+ margin-top: 0;
208
+ text-shadow: 0 0 5px rgba(0, 255, 255, 0.3);
209
+ position: relative;
210
+ display: inline-block;
211
+ }
212
+
213
+ .section-title::after {
214
+ content: '';
215
+ position: absolute;
216
+ bottom: -5px;
217
+ left: 0;
218
+ width: 100%;
219
+ height: 1px;
220
+ background: linear-gradient(90deg, rgba(0, 255, 255, 0.5), rgba(0, 100, 255, 0.5));
221
+ transform: scaleX(0);
222
+ transform-origin: left;
223
+ transition: transform 0.3s ease;
224
+ }
225
+
226
+ .section:hover .section-title::after {
227
+ transform: scaleX(1);
228
+ }
229
+
230
+ .quant-links {
231
+ display: grid;
232
+ grid-template-columns: repeat(3, 1fr);
233
+ gap: 15px;
234
+ margin: 20px 0;
235
+ }
236
+
237
+ .link-card {
238
+ padding: 15px;
239
+ background: rgba(20, 35, 45, 0.95);
240
+ border-radius: 8px;
241
+ transition: all 0.3s ease;
242
+ border: 1px solid rgba(0, 255, 255, 0.1);
243
+ position: relative;
244
+ overflow: hidden;
245
+ }
246
+
247
+ .link-card::before {
248
+ content: '';
249
+ position: absolute;
250
+ top: 0;
251
+ left: 0;
252
+ right: 0;
253
+ height: 2px;
254
+ background: linear-gradient(90deg, rgba(0, 255, 255, 0.5), rgba(0, 100, 255, 0.5));
255
+ animation: cardScan 4s linear infinite;
256
+ }
257
+
258
+ @keyframes cardScan {
259
+ 0% { transform: translateX(-100%); }
260
+ 100% { transform: translateX(100%); }
261
+ }
262
+
263
+ .link-card:hover {
264
+ transform: translateY(-3px);
265
+ box-shadow: 0 5px 15px rgba(0, 255, 255, 0.2);
266
+ border-color: rgba(0, 100, 255, 0.3);
267
+ }
268
+
269
+ .link-card h3 {
270
+ margin-top: 0;
271
+ color: #e1ffff !important;
272
+ }
273
+
274
+ .link-button {
275
+ display: inline-flex;
276
+ align-items: center;
277
+ background: rgba(0, 255, 255, 0.1);
278
+ color: #e1ffff !important;
279
+ padding: 8px 15px;
280
+ border-radius: 6px;
281
+ text-decoration: none;
282
+ border: 1px solid rgba(0, 255, 255, 0.3);
283
+ margin: 5px 0;
284
+ transition: all 0.3s ease;
285
+ font-size: 0.95em;
286
+ position: relative;
287
+ overflow: hidden;
288
+ }
289
+
290
+ .link-button::before {
291
+ content: '';
292
+ position: absolute;
293
+ top: 0;
294
+ left: -100%;
295
+ width: 100%;
296
+ height: 100%;
297
+ background: linear-gradient(90deg, transparent, rgba(255, 255, 255, 0.2), transparent);
298
+ transition: all 0.5s ease;
299
+ }
300
+
301
+ .link-button:hover {
302
+ background: rgba(0, 255, 255, 0.2);
303
+ border-color: rgba(0, 255, 255, 0.5);
304
+ transform: translateY(-2px);
305
+ box-shadow: 0 4px 12px rgba(0, 255, 255, 0.2);
306
+ }
307
+
308
+ .link-button:hover::before {
309
+ left: 100%;
310
+ }
311
+
312
+ .link-button::after {
313
+ content: '→';
314
+ margin-left: 8px;
315
+ opacity: 0.7;
316
+ transition: all 0.3s ease;
317
+ }
318
+
319
+ .link-button:hover::after {
320
+ transform: translateX(3px);
321
+ opacity: 1;
322
+ }
323
+
324
+ .button-group {
325
+ display: flex;
326
+ flex-wrap: wrap;
327
+ gap: 10px;
328
+ margin: 15px 0;
329
+ }
330
+
331
+ .disclaimer {
332
+ color: #00ff99;
333
+ border-left: 3px solid #00ff99;
334
+ padding-left: 15px;
335
+ margin: 20px 0;
336
+ position: relative;
337
+ }
338
+
339
+ .disclaimer::before {
340
+ content: '⚠️';
341
+ position: absolute;
342
+ left: -10px;
343
+ top: 0;
344
+ transform: translateX(-100%);
345
+ animation: pulse 2s ease-in-out infinite;
346
+ }
347
+
348
+ @keyframes pulse {
349
+ 0%, 100% { opacity: 1; }
350
+ 50% { opacity: 0.5; }
351
+ }
352
+
353
+ .badge {
354
+ display: inline-block;
355
+ padding: 5px 10px;
356
+ border-radius: 5px;
357
+ background: rgba(0, 255, 255, 0.1);
358
+ border: 1px solid #00ffff;
359
+ margin: 5px;
360
+ font-size: 0.9em;
361
+ animation: badgePulse 3s ease-in-out infinite;
362
+ }
363
+
364
+ @keyframes badgePulse {
365
+ 0%, 100% { box-shadow: 0 0 5px rgba(0, 255, 255, 0.3); }
366
+ 50% { box-shadow: 0 0 10px rgba(0, 255, 255, 0.5); }
367
+ }
368
+
369
+ /* Light mode adjustments */
370
+ @media (prefers-color-scheme: light) {
371
+ .container {
372
+ background: rgba(224, 255, 255, 0.95);
373
+ border-color: rgba(0, 150, 150, 0.3);
374
+ }
375
+
376
+ .model-name, .section-title, .subtitle {
377
+ color: #006666;
378
+ text-shadow: 0 0 5px rgba(0, 200, 200, 0.3);
379
+ }
380
+
381
+ .section {
382
+ background: rgba(200, 250, 255, 0.9);
383
+ border-color: rgba(0, 200, 200, 0.2);
384
+ color: #002b36;
385
+ }
386
+
387
+ .link-card {
388
+ background: rgba(150, 230, 255, 0.95);
389
+ border-color: rgba(0, 150, 150, 0.2);
390
+ }
391
+
392
+ .link-card h3 {
393
+ color: #002b36 !important;
394
+ }
395
+
396
+ .link-button {
397
+ background: rgba(0, 150, 150, 0.1);
398
+ color: #002b36 !important;
399
+ border-color: rgba(0, 150, 150, 0.3);
400
+ }
401
+
402
+ .link-button:hover {
403
+ background: rgba(0, 150, 150, 0.2);
404
+ border-color: rgba(0, 150, 150, 0.5);
405
+ }
406
+
407
+ .disclaimer {
408
+ color: #008080;
409
+ border-color: #008080;
410
+ }
411
+
412
+ .badge {
413
+ border-color: #008080;
414
+ background: rgba(0, 150, 150, 0.1);
415
+ }
416
+ }
417
+ </style>
418
+
419
+ <div class="container">
420
+
421
+ <div class="header">
422
+ <h1 class="model-name">Reformed Baptist 1689 Bible Expert v2.0 12B</h1>
423
+ <p class="subtitle">Sola Scriptura, Sola Fide, Sola Gratia</p>
424
+ </div>
425
+
426
+ <div class="section">
427
+ <h2 class="section-title">✝️ Theological Foundation</h2>
428
+ <p>This model provides robust Reformed Baptist analysis grounded in the 1689 London Baptist Confession:</p>
429
+ <ul>
430
+ <li>📖 <strong>Expanded Covenant Theology Focus</strong> - Deep training in Reformed systematic theology and exegesis</li>
431
+ <li>⚡ <strong>Optimized Doctrinal Precision</strong> - Clear articulation of the Five Solas and Doctrines of Grace</li>
432
+ <li>💎 <strong>Confessional Fidelity</strong> - Maintains strict adherence to the 1689 London Baptist Confession</li>
433
+ <li>🎓 <strong>Enhanced Polemical Capabilities</strong> - Improved defense of Reformed soteriology and covenant theology</li>
434
+ <li>🌹 <strong>Pastoral Application</strong> - Practical insights for preaching and discipleship from a Reformed perspective</li>
435
+ </ul>
436
+ </div>
437
+
438
+ <div class="section">
439
+ <h2 class="section-title">⚙️ Technical Specifications</h2>
440
+ <p><strong>FULL SETTINGS and optional Reformed Pastor character card</strong> <a href="https://huggingface.co/sleepdeprived3/Pastor-Jim" class="link-button">Pastor-Jim</a></p>
441
+ <div class="quant-links">
442
+ <div class="link-card">
443
+ <h3>GGUF</h3>
444
+ <a href="https://huggingface.co/mradermacher/Reformed-Baptist-1689-Bible-Expert-v2.0-12B-GGUF" class="link-button">Quants</a>
445
+ </div>
446
+ <div class="link-card">
447
+ <h3>imatrix</h3>
448
+ <a href="https://huggingface.co/mradermacher/Reformed-Baptist-1689-Bible-Expert-v2.0-12B-i1-GGUF" class="link-button">Quants</a>
449
+ </div>
450
+ <div class="link-card">
451
+ <h3>EXL2</h3>
452
+ <a href="https://huggingface.co/collections/sleepdeprived3/reformed-baptist-1689-bible-expert-v20-12b-exl2-68129a3fddff8278b5c80887" class="link-button">Quants</a>
453
+ </div>
454
+ </div>
455
+ </div>
456
+
457
+ <div class="section">
458
+ Chat Template: Mistral V3 Tekken
459
+ Recommended deterministic sampler for theological precision:
460
+ "temperature": 0
461
+ "top_k": 1
462
+ "dry_multiplier": 0.01
463
+ </div>
464
+
465
+ <div class="section">
466
+ <h2 class="section-title">📜 Key Features</h2>
467
+ <ul>
468
+ <li>🕊️ Answers theological questions from a 1689 Reformed Baptist perspective</li>
469
+ <li>✝️ Explains Scripture through covenant theology and grammatical-historical interpretation</li>
470
+ <li>🌍 Multilingual support for Reformed ministry in 10+ languages</li>
471
+ <li>🎓 Enhanced capabilities for expository preaching and catechism development</li>
472
+ <li>💬 Advanced handling of Reformed distinctives: effectual calling, definite atonement, perseverance of the saints</li>
473
+ <li>📖 Specializes in Baptist covenant theology and regulative principle of worship</li>
474
+ </ul>
475
+ </div>
476
+
477
+ <div class="section">
478
+ <h2 class="section-title">⚠️ Ethical Considerations</h2>
479
+ <p>This model is designed to:</p>
480
+ <ul>
481
+ <li>Maintain strict fidelity to the 1689 London Baptist Confession</li>
482
+ <li>Promote the Five Solas and Reformed soteriology</li>
483
+ <li>Support but never replace ordained eldership and church courts</li>
484
+ </ul>
485
+ </div>
486
+
487
+ <div class="section">
488
+ <h2 class="section-title">📖 Performance Notes</h2>
489
+ <ul>
490
+ <li>🔥 Maintains confessional accuracy in complex theological discussions</li>
491
+ <li>📖 Handles covenant theology analysis with improved consistency</li>
492
+ <li>🧠 Excels at tracing redemptive history through Scripture</li>
493
+ <li>⚡ Improved handling of Reformed polemics and historical theology</li>
494
+ <li>🎭 Responds to nuanced distinctions between Reformed traditions</li>
495
+ </ul>
496
+ </div>
497
+
498
+ <div class="section">
499
+ <h2 class="section-title">🧑‍🔬 Model Authors</h2>
500
+ <ul>
501
+ <li>sleepdeprived3 (Training Data & Fine-Tuning)</li>
502
+ </ul>
503
+ </div>
504
+
505
+ <script>
506
+ // Append a 'Last updated' date stamp to the container
507
+ document.addEventListener('DOMContentLoaded', function() {
508
+ const dateElement = document.createElement('div');
509
+ dateElement.style.textAlign = 'center';
510
+ dateElement.style.marginTop = '20px';
511
+ dateElement.style.opacity = '0.7';
512
+ dateElement.textContent = 'Last updated: ' + new Date().toLocaleDateString();
513
+ document.querySelector('.container').appendChild(dateElement);
514
+ });
515
+ </script>
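The README's recommended settings translate directly into a greedy-decoding call. Below is a minimal sketch using the 🤗 transformers API; it assumes the full-precision repo id from huggingface-metadata.txt (the shards in this commit are EXL2-quantized and need an exllamav2-based loader instead), and note that "dry_multiplier" is a DRY-repetition-sampler option exposed by some frontends with no direct transformers equivalent.

```python
# Sketch only: assumes the full-precision weights. temperature=0 with top_k=1
# amounts to greedy decoding, which transformers expresses as do_sample=False.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "sleepdeprived3/Reformed-Baptist-1689-Bible-Expert-v2.0-12B"  # assumed id
tok = AutoTokenizer.from_pretrained(repo)
model = AutoModelForCausalLM.from_pretrained(repo, torch_dtype="auto", device_map="auto")

# The tokenizer's chat template handles Mistral V3 Tekken formatting.
messages = [{"role": "user", "content": "Summarize chapter 1 of the 1689 Confession."}]
ids = tok.apply_chat_template(
    messages, add_generation_prompt=True, return_tensors="pt"
).to(model.device)

out = model.generate(ids, do_sample=False, max_new_tokens=512)
print(tok.decode(out[0][ids.shape[-1]:], skip_special_tokens=True))
```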
config.json ADDED
@@ -0,0 +1,37 @@
1
+ {
2
+ "architectures": [
3
+ "MistralForCausalLM"
4
+ ],
5
+ "attention_dropout": 0.0,
6
+ "bos_token_id": 1,
7
+ "eos_token_id": 2,
8
+ "head_dim": 128,
9
+ "hidden_act": "silu",
10
+ "hidden_size": 5120,
11
+ "initializer_range": 0.02,
12
+ "intermediate_size": 14336,
13
+ "max_position_embeddings": 131072,
14
+ "model_type": "mistral",
15
+ "num_attention_heads": 32,
16
+ "num_hidden_layers": 40,
17
+ "num_key_value_heads": 8,
18
+ "rms_norm_eps": 1e-05,
19
+ "rope_theta": 1000000.0,
20
+ "sliding_window": null,
21
+ "tie_word_embeddings": false,
22
+ "torch_dtype": "bfloat16",
23
+ "transformers_version": "4.51.3",
24
+ "use_cache": true,
25
+ "vocab_size": 131072,
26
+ "quantization_config": {
27
+ "quant_method": "exl2",
28
+ "version": "0.2.7",
29
+ "bits": 6.0,
30
+ "head_bits": 8,
31
+ "calibration": {
32
+ "rows": 115,
33
+ "length": 2048,
34
+ "dataset": "(default)"
35
+ }
36
+ }
37
+ }
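For quick orientation, the interesting facts in this config can be pulled out with a few lines of standard-library Python. A sketch, assuming a local copy of the file:

```python
# Sketch: read key architecture facts out of the config.json above.
import json

with open("config.json") as f:
    cfg = json.load(f)

heads, kv_heads = cfg["num_attention_heads"], cfg["num_key_value_heads"]
print(f"layers={cfg['num_hidden_layers']}, hidden={cfg['hidden_size']}, "
      f"head_dim={cfg['head_dim']}")
# 32 query heads sharing 8 KV heads -> grouped-query attention, 4 queries per KV head.
print(f"GQA ratio: {heads // kv_heads} query heads per KV head")
# 131072 max positions with rope_theta=1e6 is the long-context Nemo setup.
print(f"context={cfg['max_position_embeddings']}, rope_theta={cfg['rope_theta']}")
q = cfg.get("quantization_config", {})
print(f"quant: {q.get('quant_method')} @ {q.get('bits')} bpw "
      f"(head layer at {q.get('head_bits')} bits)")
```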
generation_config.json ADDED
@@ -0,0 +1,6 @@
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1,
4
+ "eos_token_id": 2,
5
+ "transformers_version": "4.51.3"
6
+ }
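These defaults are what `GenerationConfig.from_pretrained` picks up at `generate()` time; per-call arguments override them. A small sketch, with the repo id assumed as above:

```python
# Sketch: generation defaults come from generation_config.json.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained(
    "sleepdeprived3/Reformed-Baptist-1689-Bible-Expert-v2.0-12B")  # assumed id
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 1, 2 per the file above
```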
huggingface-metadata.txt ADDED
@@ -0,0 +1,10 @@
1
+ url: https://huggingface.co/sleepdeprived3/Reformed-Baptist-1689-Bible-Expert-v2.0-12B
2
+ branch: main
3
+ download date: 2025-04-30 16:54:17
4
+ sha256sum:
5
+ 1d118ad961e2309e6b9a04054c05fc17ed921652e933560dd78518f152a333ab model-00001-of-00005.safetensors
6
+ 44e785492b27766e4cf8e3a2ae4006609e31af944c556aff880ee0be8d3ad226 model-00002-of-00005.safetensors
7
+ d93bf456a377edbb7da1c704c2de8ded667b3845b943774ecf60287e2385344a model-00003-of-00005.safetensors
8
+ 3a3ab247130a9f744037d17cfe6f90d6cb4d737bd0fff9e1a669a5da381dae12 model-00004-of-00005.safetensors
9
+ 17ba62b2af750e8eb0cc424fc281393f1470794820503e95578a33ffcc50765a model-00005-of-00005.safetensors
10
+ b0240ce510f08e6c2041724e9043e33be9d251d1e4a4d94eb68cd47b954b61d2 tokenizer.json
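The checksum block doubles as a manifest. A standard-library sketch that verifies local downloads against it (file locations assumed to be the current directory):

```python
# Sketch: verify downloaded files against the sha256 list above.
import hashlib
from pathlib import Path

def sha256_of(path: Path, chunk: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with path.open("rb") as f:
        for block in iter(lambda: f.read(chunk), b""):
            h.update(block)
    return h.hexdigest()

expected = {}
for line in Path("huggingface-metadata.txt").read_text().splitlines():
    parts = line.split()
    if len(parts) == 2 and len(parts[0]) == 64:  # "<sha256> <filename>" rows
        expected[parts[1]] = parts[0]

for name, digest in expected.items():
    ok = sha256_of(Path(name)) == digest
    print(("OK   " if ok else "FAIL ") + name)
```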
measurement.json ADDED
The diff for this file is too large to render. See raw diff
model.safetensors.index.json ADDED
@@ -0,0 +1,370 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 24495564800
4
+ },
5
+ "weight_map": {
6
+ "lm_head.weight": "model-00005-of-00005.safetensors",
7
+ "model.embed_tokens.weight": "model-00001-of-00005.safetensors",
8
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00005.safetensors",
9
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
10
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
11
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
12
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
13
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
14
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
15
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
16
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
17
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00005.safetensors",
18
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
19
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
20
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
21
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
22
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
23
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
24
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
25
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
26
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00005.safetensors",
27
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
28
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
29
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
30
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
31
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
32
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
33
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
34
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
35
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00005.safetensors",
36
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
37
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
38
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
39
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
40
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
41
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
42
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
43
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
44
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00005.safetensors",
45
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
46
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
47
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
48
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
49
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
50
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
51
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
52
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
53
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00005.safetensors",
54
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
55
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
56
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
57
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
58
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
59
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
60
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
61
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
62
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00005.safetensors",
63
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
64
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
65
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
66
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
67
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
68
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
69
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
70
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
71
+ "model.layers.15.input_layernorm.weight": "model-00003-of-00005.safetensors",
72
+ "model.layers.15.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
73
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
74
+ "model.layers.15.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
75
+ "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
76
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
77
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
78
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
79
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
80
+ "model.layers.16.input_layernorm.weight": "model-00003-of-00005.safetensors",
81
+ "model.layers.16.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
82
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
83
+ "model.layers.16.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
84
+ "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
85
+ "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
86
+ "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
87
+ "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
88
+ "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
89
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00005.safetensors",
90
+ "model.layers.17.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
91
+ "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
92
+ "model.layers.17.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
93
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
94
+ "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
95
+ "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
96
+ "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
97
+ "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
98
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00005.safetensors",
99
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
100
+ "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
101
+ "model.layers.18.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
102
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
103
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
104
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
105
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
106
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
107
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00005.safetensors",
108
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
109
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
110
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
111
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
112
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
113
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
114
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
115
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
116
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00005.safetensors",
117
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
118
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
119
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
120
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
121
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
122
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
123
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
124
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
125
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00005.safetensors",
126
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
127
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
128
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
129
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
130
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
131
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
132
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
133
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
134
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00005.safetensors",
135
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
136
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
137
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
138
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
139
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
140
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
141
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
142
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
143
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00005.safetensors",
144
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
145
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
146
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
147
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
148
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
149
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
150
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
151
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
152
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00005.safetensors",
153
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00005.safetensors",
154
+ "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
155
+ "model.layers.23.mlp.up_proj.weight": "model-00003-of-00005.safetensors",
156
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00005.safetensors",
157
+ "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
158
+ "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
159
+ "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
160
+ "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
161
+ "model.layers.24.input_layernorm.weight": "model-00004-of-00005.safetensors",
162
+ "model.layers.24.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
163
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00005.safetensors",
164
+ "model.layers.24.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
165
+ "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
166
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00005.safetensors",
167
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00005.safetensors",
168
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00005.safetensors",
169
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00005.safetensors",
170
+ "model.layers.25.input_layernorm.weight": "model-00004-of-00005.safetensors",
171
+ "model.layers.25.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
172
+ "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
173
+ "model.layers.25.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
174
+ "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
175
+ "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
176
+ "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
177
+ "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
178
+ "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
179
+ "model.layers.26.input_layernorm.weight": "model-00004-of-00005.safetensors",
180
+ "model.layers.26.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
181
+ "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
182
+ "model.layers.26.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
183
+ "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
184
+ "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
185
+ "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
186
+ "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
187
+ "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
188
+ "model.layers.27.input_layernorm.weight": "model-00004-of-00005.safetensors",
189
+ "model.layers.27.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
190
+ "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
191
+ "model.layers.27.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
192
+ "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
193
+ "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
194
+ "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
195
+ "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
196
+ "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
197
+ "model.layers.28.input_layernorm.weight": "model-00004-of-00005.safetensors",
198
+ "model.layers.28.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
199
+ "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
200
+ "model.layers.28.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
201
+ "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
202
+ "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
203
+ "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
204
+ "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
205
+ "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
206
+ "model.layers.29.input_layernorm.weight": "model-00004-of-00005.safetensors",
207
+ "model.layers.29.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
208
+ "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
209
+ "model.layers.29.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
210
+ "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
211
+ "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
212
+ "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
213
+ "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
214
+ "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
215
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00005.safetensors",
216
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
217
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
218
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
219
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
220
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
221
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
222
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
223
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
224
+ "model.layers.30.input_layernorm.weight": "model-00004-of-00005.safetensors",
225
+ "model.layers.30.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
226
+ "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
227
+ "model.layers.30.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
228
+ "model.layers.30.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
229
+ "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
230
+ "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
231
+ "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
232
+ "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
233
+ "model.layers.31.input_layernorm.weight": "model-00004-of-00005.safetensors",
234
+ "model.layers.31.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
235
+ "model.layers.31.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
236
+ "model.layers.31.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
237
+ "model.layers.31.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
238
+ "model.layers.31.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
239
+ "model.layers.31.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
240
+ "model.layers.31.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
241
+ "model.layers.31.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
242
+ "model.layers.32.input_layernorm.weight": "model-00004-of-00005.safetensors",
243
+ "model.layers.32.mlp.down_proj.weight": "model-00004-of-00005.safetensors",
244
+ "model.layers.32.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
245
+ "model.layers.32.mlp.up_proj.weight": "model-00004-of-00005.safetensors",
246
+ "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00005.safetensors",
247
+ "model.layers.32.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
248
+ "model.layers.32.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
249
+ "model.layers.32.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
250
+ "model.layers.32.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
251
+ "model.layers.33.input_layernorm.weight": "model-00005-of-00005.safetensors",
252
+ "model.layers.33.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
253
+ "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00005.safetensors",
254
+ "model.layers.33.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
255
+ "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
256
+ "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00005.safetensors",
257
+ "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00005.safetensors",
258
+ "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00005.safetensors",
259
+ "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00005.safetensors",
260
+ "model.layers.34.input_layernorm.weight": "model-00005-of-00005.safetensors",
261
+ "model.layers.34.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
262
+ "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
263
+ "model.layers.34.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
264
+ "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
265
+ "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
266
+ "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
267
+ "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
268
+ "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
269
+ "model.layers.35.input_layernorm.weight": "model-00005-of-00005.safetensors",
270
+ "model.layers.35.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
271
+ "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
272
+ "model.layers.35.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
273
+ "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
274
+ "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
275
+ "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
276
+ "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
277
+ "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
278
+ "model.layers.36.input_layernorm.weight": "model-00005-of-00005.safetensors",
279
+ "model.layers.36.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
280
+ "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
281
+ "model.layers.36.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
282
+ "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
283
+ "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
284
+ "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
285
+ "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
286
+ "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
287
+ "model.layers.37.input_layernorm.weight": "model-00005-of-00005.safetensors",
288
+ "model.layers.37.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
289
+ "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
290
+ "model.layers.37.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
291
+ "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
292
+ "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
293
+ "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
294
+ "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
295
+ "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
296
+ "model.layers.38.input_layernorm.weight": "model-00005-of-00005.safetensors",
297
+ "model.layers.38.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
298
+ "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
299
+ "model.layers.38.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
300
+ "model.layers.38.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
301
+ "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
302
+ "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
303
+ "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
304
+ "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
305
+ "model.layers.39.input_layernorm.weight": "model-00005-of-00005.safetensors",
306
+ "model.layers.39.mlp.down_proj.weight": "model-00005-of-00005.safetensors",
307
+ "model.layers.39.mlp.gate_proj.weight": "model-00005-of-00005.safetensors",
308
+ "model.layers.39.mlp.up_proj.weight": "model-00005-of-00005.safetensors",
309
+ "model.layers.39.post_attention_layernorm.weight": "model-00005-of-00005.safetensors",
310
+ "model.layers.39.self_attn.k_proj.weight": "model-00005-of-00005.safetensors",
311
+ "model.layers.39.self_attn.o_proj.weight": "model-00005-of-00005.safetensors",
312
+ "model.layers.39.self_attn.q_proj.weight": "model-00005-of-00005.safetensors",
313
+ "model.layers.39.self_attn.v_proj.weight": "model-00005-of-00005.safetensors",
314
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00005.safetensors",
315
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
316
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
317
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
318
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
319
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
320
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
321
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
322
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
323
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00005.safetensors",
324
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00005.safetensors",
325
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
326
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00005.safetensors",
327
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00005.safetensors",
328
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
329
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
330
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
331
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
332
+ "model.layers.6.input_layernorm.weight": "model-00002-of-00005.safetensors",
333
+ "model.layers.6.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
334
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00005.safetensors",
335
+ "model.layers.6.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
336
+ "model.layers.6.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
337
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00005.safetensors",
338
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00005.safetensors",
339
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00005.safetensors",
340
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00005.safetensors",
341
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00005.safetensors",
342
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
343
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
344
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
345
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
346
+ "model.layers.7.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
347
+ "model.layers.7.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
348
+ "model.layers.7.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
349
+ "model.layers.7.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
350
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00005.safetensors",
351
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
352
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
353
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
354
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
355
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
356
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
357
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
358
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
359
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00005.safetensors",
360
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00005.safetensors",
361
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00005.safetensors",
362
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00005.safetensors",
363
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00005.safetensors",
364
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00005.safetensors",
365
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00005.safetensors",
366
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00005.safetensors",
367
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00005.safetensors",
368
+ "model.norm.weight": "model-00005-of-00005.safetensors"
369
+ }
370
+ }
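The `weight_map` is exactly what sharded-checkpoint loaders consult to locate a tensor's shard. A sketch using the `safetensors` package against local files; note, for example, that layer 15's `gate_proj` lives in shard 2 even though most of that layer sits in shard 3:

```python
# Sketch: resolve one tensor through the index and read only its shard.
import json
from safetensors import safe_open

with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.layers.15.mlp.gate_proj.weight"
shard = index["weight_map"][name]   # -> "model-00002-of-00005.safetensors"
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(name, tuple(tensor.shape), tensor.dtype)
```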
output-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:46facc0c081ad6422c50066836bf53533a92ca6099aacd081bcb815a47b66597
3
+ size 8544927932
output-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6852d973627e3058b276623723d55f005be7a8bab92d25f5f70df3cd97c3c33c
3
+ size 1649484276
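Both `output-*.safetensors` entries (and `tokenizer.json` below) are stored as Git LFS pointers: three `key value` lines standing in for the real blob. A small parser sketch for the format, using the first pointer above as sample input:

```python
# Sketch: parse a Git LFS pointer file into its version, hash, and size.
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {
        "version": fields["version"],
        "sha256": fields["oid"].removeprefix("sha256:"),
        "size_bytes": int(fields["size"]),
    }

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:46facc0c081ad6422c50066836bf53533a92ca6099aacd081bcb815a47b66597
size 8544927932"""
info = parse_lfs_pointer(pointer)
print(f"{info['size_bytes'] / 1e9:.2f} GB, sha256 {info['sha256'][:12]}...")
```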
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
1
+ {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "</s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "unk_token": {
17
+ "content": "<unk>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ }
23
+ }
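A quick cross-check that the loaded tokenizer's special tokens line up with this map and with the ids in `config.json`; the repo id is assumed as before:

```python
# Sketch: confirm special tokens match special_tokens_map.json and config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained(
    "sleepdeprived3/Reformed-Baptist-1689-Bible-Expert-v2.0-12B")  # assumed id
print(tok.bos_token, tok.eos_token, tok.unk_token)  # <s> </s> <unk>
print(tok.bos_token_id, tok.eos_token_id)           # 1, 2 -- matches config.json
```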
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b0240ce510f08e6c2041724e9043e33be9d251d1e4a4d94eb68cd47b954b61d2
3
+ size 17078292
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff