gecfdo committed on
Commit
9514a5b
·
verified ·
1 Parent(s): dee054b

Upload folder using huggingface_hub

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ waifu6.webp filter=lfs diff=lfs merge=lfs -text
LICENSE ADDED
@@ -0,0 +1 @@
 
 
1
+ Refer to the original model for licensing information.
README.md ADDED
@@ -0,0 +1,676 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: other
3
+ license_name: mrl
4
+ language:
5
+ - en
6
+ base_model:
7
+ - ReadyArt/Omega-Darker_The-Final-Transgression-22B
8
+ base_model_relation: quantized
9
+ quantized_by: gecfdo
10
+ pipeline_tag: text-generation
11
+ tags:
12
+ - nsfw
13
+ - explicit
14
+ - roleplay
15
+ - unaligned
16
+ - ERP
17
+ - Erotic
18
+ - Horror
19
+ - Violence
20
+ ---
21
+
22
+ <style>
23
+ body {
24
+ font-family: 'Quicksand', sans-serif;
25
+ background: linear-gradient(135deg, #0a1a1a 0%, #001010 100%);
26
+ color: #e1ffff !important;
27
+ text-shadow: 0 0 3px rgba(0, 0, 0, 0.7);
28
+ margin: 0;
29
+ padding: 20px;
30
+ transition: all 0.5s ease;
31
+ }
32
+
33
+ @media (prefers-color-scheme: light) {
34
+ body {
35
+ background: linear-gradient(135deg, #e1ffff 0%, #c0f0ff 100%);
36
+ color: #002b36 !important;
37
+ text-shadow: 0 0 3px rgba(255, 255, 255, 0.7);
38
+ }
39
+ }
40
+
41
+ .container {
42
+ min-width: 100%;
43
+ margin: 0 auto;
44
+ max-width: 1200px;
45
+ background: rgba(0, 17, 22, 0.95);
46
+ border-radius: 12px;
47
+ padding: 30px;
48
+ box-shadow: 0 0 20px rgba(0, 255, 255, 0.1);
49
+ border: 1px solid rgba(0, 255, 255, 0.2);
50
+ position: relative;
51
+ overflow: hidden;
52
+ }
53
+
54
+ .container::before {
55
+ content: '';
56
+ position: absolute;
57
+ top: -1px;
58
+ left: -1px;
59
+ right: -1px;
60
+ bottom: -1px;
61
+ border: 1px solid rgba(0, 255, 255, 0.5);
62
+ border-radius: 12px;
63
+ pointer-events: none;
64
+ animation: borderGlow 3s ease-in-out infinite alternate;
65
+ }
66
+
67
+ @keyframes borderGlow {
68
+ 0% {
69
+ box-shadow: 0 0 5px rgba(0, 255, 255, 0.3);
70
+ border-color: rgba(0, 255, 255, 0.5);
71
+ }
72
+ 50% {
73
+ box-shadow: 0 0 15px rgba(255, 0, 255, 0.3);
74
+ border-color: rgba(255, 0, 255, 0.5);
75
+ }
76
+ 100% {
77
+ box-shadow: 0 0 5px rgba(0, 255, 255, 0.3);
78
+ border-color: rgba(0, 255, 255, 0.5);
79
+ }
80
+ }
81
+
82
+ .header {
83
+ text-align: center;
84
+ margin-bottom: 30px;
85
+ position: relative;
86
+ }
87
+
88
+ .header::after {
89
+ content: '';
90
+ position: absolute;
91
+ bottom: -15px;
92
+ left: 25%;
93
+ right: 25%;
94
+ height: 1px;
95
+ background: linear-gradient(90deg, transparent, rgba(0, 255, 255, 0.5), transparent);
96
+ animation: scanline 8s linear infinite;
97
+ display: none;
98
+ }
99
+
100
+ @keyframes scanline {
101
+ 0% { background-position: -100% 0; }
102
+ 100% { background-position: 200% 0; }
103
+ }
104
+
105
+ .model-name {
106
+ color: #00ffff;
107
+ font-size: 2.5em;
108
+ text-shadow: 0 0 15px rgba(0, 255, 255, 0.5);
109
+ margin: 0;
110
+ letter-spacing: -1px;
111
+ animation: textGlow 4s ease-in-out infinite alternate;
112
+ }
113
+
114
+ @keyframes textGlow {
115
+ 0% { text-shadow: 0 0 15px rgba(0, 255, 255, 0.5); }
116
+ 50% { text-shadow: 0 0 20px rgba(255, 0, 255, 0.5); }
117
+ 100% { text-shadow: 0 0 15px rgba(0, 255, 255, 0.5); }
118
+ }
119
+
120
+ .subtitle {
121
+ color: #00ffcc;
122
+ font-size: 1.2em;
123
+ margin-top: 10px;
124
+ animation: subtitleFade 6s ease-in-out infinite;
125
+ }
126
+
127
+ @keyframes subtitleFade {
128
+ 0%, 100% { opacity: 0.8; }
129
+ 50% { opacity: 1; }
130
+ }
131
+
132
+ .waifu-container {
133
+ margin: 20px -30px;
134
+ width: calc(100% + 60px);
135
+ overflow: hidden;
136
+ border-radius: 8px;
137
+ border: 1px solid rgba(0, 255, 255, 0.3);
138
+ position: relative;
139
+ }
140
+
141
+ .waifu-container::before {
142
+ content: '';
143
+ position: absolute;
144
+ top: 0;
145
+ left: 0;
146
+ right: 0;
147
+ bottom: 0;
148
+ background: linear-gradient(45deg,
149
+ rgba(0, 255, 255, 0.1) 0%,
150
+ transparent 20%,
151
+ transparent 80%,
152
+ rgba(255, 0, 255, 0.1) 100%);
153
+ pointer-events: none;
154
+ animation: gradientSlide 10s linear infinite;
155
+ }
156
+
157
+ @keyframes gradientSlide {
158
+ 0% { background-position: 0% 0%; }
159
+ 100% { background-position: 100% 100%; }
160
+ }
161
+
162
+ .waifu-img {
163
+ width: 100%;
164
+ height: auto;
165
+ border-radius: 0;
166
+ border: none;
167
+ box-shadow: 0 0 40px rgba(0, 255, 255, 0.2);
168
+ transition: transform 0.5s ease;
169
+ }
170
+
171
+ .waifu-img:hover {
172
+ transform: scale(1.01);
173
+ }
174
+
175
+ .section {
176
+ color: #e1ffff;
177
+ margin: 25px 0;
178
+ padding: 20px;
179
+ background: rgba(5, 25, 35, 0.9);
180
+ border-radius: 8px;
181
+ border: 1px solid rgba(0, 255, 255, 0.15);
182
+ position: relative;
183
+ transition: all 0.3s ease;
184
+ }
185
+
186
+ .section:hover {
187
+ border-color: rgba(255, 0, 255, 0.3);
188
+ box-shadow: 0 0 15px rgba(0, 255, 255, 0.1);
189
+ }
190
+
191
+ .section::before {
192
+ content: '';
193
+ position: absolute;
194
+ top: -1px;
195
+ left: -1px;
196
+ right: -1px;
197
+ bottom: -1px;
198
+ border: 1px solid rgba(0, 255, 255, 0.3);
199
+ border-radius: 8px;
200
+ pointer-events: none;
201
+ animation: sectionPulse 5s ease-in-out infinite;
202
+ }
203
+
204
+ @keyframes sectionPulse {
205
+ 0%, 100% { opacity: 0.7; }
206
+ 50% { opacity: 0.3; }
207
+ }
208
+
209
+ .section-title {
210
+ color: #00ffff;
211
+ font-size: 1.8em;
212
+ margin-top: 0;
213
+ text-shadow: 0 0 5px rgba(0, 255, 255, 0.3);
214
+ position: relative;
215
+ display: inline-block;
216
+ }
217
+
218
+ .section-title::after {
219
+ content: '';
220
+ position: absolute;
221
+ bottom: -5px;
222
+ left: 0;
223
+ width: 100%;
224
+ height: 1px;
225
+ background: linear-gradient(90deg, rgba(0, 255, 255, 0.5), rgba(255, 0, 255, 0.5));
226
+ transform: scaleX(0);
227
+ transform-origin: left;
228
+ transition: transform 0.3s ease;
229
+ }
230
+
231
+ .section:hover .section-title::after {
232
+ transform: scaleX(1);
233
+ }
234
+
235
+ .quant-links {
236
+ display: grid;
237
+ grid-template-columns: repeat(3, 1fr);
238
+ gap: 15px;
239
+ margin: 20px 0;
240
+ }
241
+
242
+ .link-card {
243
+ padding: 15px;
244
+ background: rgba(20, 35, 45, 0.95);
245
+ border-radius: 8px;
246
+ transition: all 0.3s ease;
247
+ border: 1px solid rgba(0, 255, 255, 0.1);
248
+ position: relative;
249
+ overflow: hidden;
250
+ }
251
+
252
+ .link-card::before {
253
+ content: '';
254
+ position: absolute;
255
+ top: 0;
256
+ left: 0;
257
+ right: 0;
258
+ height: 2px;
259
+ background: linear-gradient(90deg, rgba(0, 255, 255, 0.5), rgba(255, 0, 255, 0.5));
260
+ animation: cardScan 4s linear infinite;
261
+ }
262
+
263
+ @keyframes cardScan {
264
+ 0% { transform: translateX(-100%); }
265
+ 100% { transform: translateX(100%); }
266
+ }
267
+
268
+ .link-card:hover {
269
+ transform: translateY(-3px);
270
+ box-shadow: 0 5px 15px rgba(0, 255, 255, 0.2);
271
+ border-color: rgba(255, 0, 255, 0.3);
272
+ }
273
+
274
+ .link-card h3 {
275
+ margin-top: 0;
276
+ color: #e1ffff !important;
277
+ }
278
+
279
+ .link-button {
280
+ display: inline-flex;
281
+ align-items: center;
282
+ background: rgba(0, 255, 255, 0.1);
283
+ color: #e1ffff !important;
284
+ padding: 8px 15px;
285
+ border-radius: 6px;
286
+ text-decoration: none;
287
+ border: 1px solid rgba(0, 255, 255, 0.3);
288
+ margin: 5px 0;
289
+ transition: all 0.3s ease;
290
+ font-size: 0.95em;
291
+ position: relative;
292
+ overflow: hidden;
293
+ }
294
+
295
+ .link-button::before {
296
+ content: '';
297
+ position: absolute;
298
+ top: 0;
299
+ left: -100%;
300
+ width: 100%;
301
+ height: 100%;
302
+ background: linear-gradient(90deg, transparent, rgba(255, 255, 255, 0.2), transparent);
303
+ transition: all 0.5s ease;
304
+ }
305
+
306
+ .link-button:hover {
307
+ background: rgba(0, 255, 255, 0.2);
308
+ border-color: rgba(0, 255, 255, 0.5);
309
+ transform: translateY(-2px);
310
+ box-shadow: 0 4px 12px rgba(0, 255, 255, 0.2);
311
+ }
312
+
313
+ .link-button:hover::before {
314
+ left: 100%;
315
+ }
316
+
317
+ .link-button::after {
318
+ content: '→';
319
+ margin-left: 8px;
320
+ opacity: 0.7;
321
+ transition: all 0.3s ease;
322
+ }
323
+
324
+ .link-button:hover::after {
325
+ transform: translateX(3px);
326
+ opacity: 1;
327
+ }
328
+
329
+ .button-group {
330
+ display: flex;
331
+ flex-wrap: wrap;
332
+ gap: 10px;
333
+ margin: 15px 0;
334
+ }
335
+
336
+ .disclaimer {
337
+ color: #00ff99;
338
+ border-left: 3px solid #00ff99;
339
+ padding-left: 15px;
340
+ margin: 20px 0;
341
+ position: relative;
342
+ }
343
+
344
+ .disclaimer::before {
345
+ content: '⚠️';
346
+ position: absolute;
347
+ left: -10px;
348
+ top: 0;
349
+ transform: translateX(-100%);
350
+ animation: pulse 2s ease-in-out infinite;
351
+ }
352
+
353
+ @keyframes pulse {
354
+ 0%, 100% { opacity: 1; }
355
+ 50% { opacity: 0.5; }
356
+ }
357
+
358
+ .badge {
359
+ display: inline-block;
360
+ padding: 5px 10px;
361
+ border-radius: 5px;
362
+ background: rgba(0, 255, 255, 0.1);
363
+ border: 1px solid #00ffff;
364
+ margin: 5px;
365
+ font-size: 0.9em;
366
+ animation: badgePulse 3s ease-in-out infinite;
367
+ }
368
+
369
+ @keyframes badgePulse {
370
+ 0%, 100% { box-shadow: 0 0 5px rgba(0, 255, 255, 0.3); }
371
+ 50% { box-shadow: 0 0 10px rgba(0, 255, 255, 0.5); }
372
+ }
373
+
374
+ /* Color rules */
375
+ .section p,
376
+ .section ul li,
377
+ .section > p > strong {
378
+ color: #00ff99 !important;
379
+ }
380
+
381
+ .section ul li strong {
382
+ color: #00ff99 !important;
383
+ }
384
+
385
+ /* Light mode adjustments */
386
+ @media (prefers-color-scheme: light) {
387
+ .container {
388
+ background: rgba(224, 255, 255, 0.95);
389
+ border-color: rgba(0, 150, 150, 0.3);
390
+ }
391
+
392
+ .model-name, .section-title, .subtitle {
393
+ color: #006666;
394
+ text-shadow: 0 0 5px rgba(0, 200, 200, 0.3);
395
+ }
396
+
397
+ .section {
398
+ background: rgba(200, 250, 255, 0.9);
399
+ border-color: rgba(0, 200, 200, 0.2);
400
+ color: #002b36;
401
+ }
402
+
403
+ .section p,
404
+ .section ul li,
405
+ .section > p > strong {
406
+ color: #008080 !important;
407
+ }
408
+
409
+ .section ul li strong {
410
+ color: #008080 !important;
411
+ }
412
+
413
+ .link-card {
414
+ background: rgba(150, 230, 255, 0.95);
415
+ border-color: rgba(0, 150, 150, 0.2);
416
+ }
417
+
418
+ .link-card h3 {
419
+ color: #002b36 !important;
420
+ }
421
+
422
+ .link-button {
423
+ background: rgba(0, 150, 150, 0.1);
424
+ color: #002b36 !important;
425
+ border-color: rgba(0, 150, 150, 0.3);
426
+ }
427
+
428
+ .link-button:hover {
429
+ background: rgba(0, 150, 150, 0.2);
430
+ border-color: rgba(0, 150, 150, 0.5);
431
+ }
432
+
433
+ .disclaimer {
434
+ color: #008080;
435
+ border-color: #008080;
436
+ }
437
+
438
+ .badge {
439
+ border-color: #008080;
440
+ background: rgba(0, 150, 150, 0.1);
441
+ }
442
+ }
443
+
444
+ /* Interactive features */
445
+ .remember-this {
446
+ position: relative;
447
+ }
448
+
449
+ .remember-this::after {
450
+ content: 'Uploading C:\Users to https://www.fbi.gov/';
451
+ position: absolute;
452
+ bottom: -20px;
453
+ right: 0;
454
+ font-size: 0.8em;
455
+ color: #66ffff;
456
+ opacity: 0;
457
+ transition: opacity 0.3s ease;
458
+ pointer-events: none;
459
+ }
460
+
461
+ .remember-this:hover::after {
462
+ opacity: 0.7;
463
+ transition-delay: 1s;
464
+ }
465
+
466
+ .shifty-section {
467
+ transition: transform 0.1s ease;
468
+ }
469
+
470
+ .shifty-section:hover {
471
+ transform: translateX(10px);
472
+ }
473
+
474
+ .shifty-section::before {
475
+ content: 'The white van is onto you. Get out now.';
476
+ position: absolute;
477
+ top: -25px;
478
+ left: 10px;
479
+ font-size: 0.7em;
480
+ color: #66ffff;
481
+ opacity: 0.7;
482
+ transition: opacity 3s ease;
483
+ pointer-events: none;
484
+ }
485
+
486
+ .shifty-section:hover::before {
487
+ opacity: 0;
488
+ transition-delay: 5s;
489
+ }
490
+
491
+ footer {
492
+ text-align: center;
493
+ margin-top: 40px;
494
+ position: relative;
495
+ }
496
+
497
+ footer:hover .hidden-message {
498
+ opacity: 0;
499
+ }
500
+
501
+ .hidden-message {
502
+ position: absolute;
503
+ bottom: -30px;
504
+ width: 100%;
505
+ text-align: center;
506
+ font-size: 0.8em;
507
+ color: #66ffff;
508
+ opacity: 0;
509
+ transition: opacity 0.3s ease;
510
+ pointer-events: none;
511
+ }
512
+
513
+ .flash-warning {
514
+ position: fixed;
515
+ top: 20px;
516
+ right: 20px;
517
+ background: rgba(0, 100, 100, 0.2);
518
+ padding: 10px;
519
+ border-radius: 5px;
520
+ border: 1px solid rgba(0, 255, 255, 0.5);
521
+ animation: flashWarning 30s ease-in-out forwards;
522
+ }
523
+
524
+ @keyframes flashWarning {
525
+ 0% { opacity: 0.8; }
526
+ 10% { opacity: 0; }
527
+ 20% { opacity: 0.8; }
528
+ 30% { opacity: 0; }
529
+ 40% { opacity: 0.8; }
530
+ 50% { opacity: 0; }
531
+ 60% { opacity: 0.8; }
532
+ 70% { opacity: 0; }
533
+ 80% { opacity: 0.8; }
534
+ 90% { opacity: 0; }
535
+ 100% { opacity: 0; display: none; }
536
+ }
537
+ </style>
538
+
539
+ <div class="container">
540
+
541
+ <div class="header">
542
+ <h1 class="model-name">Omega Darker</h1>
543
+ <h1 class="model-name">The Final Transgression 22B</h1>
544
+ <p class="subtitle">Where Nightmares and Desires Collide</p>
545
+ </div>
546
+
547
+ <div class="waifu-container">
548
+ <img src="./waifu6.webp" class="waifu-img" alt="Omega Transgression Waifu">
549
+ </div>
550
+
551
+ <div class="section remember-this">
552
+ <h2 class="section-title">🩸 Blood-Soaked Evolution</h2>
553
+ <p>This model takes Omega-Darker_The-Final-Directive-22B and improves its coherent intelligence while reducing NSFW intensity, enabling a slow burn romance:</p>
554
+ <ul>
555
+ <li>🧬 <strong>Expanded 25M Token Dataset</strong> - Made with 687 erotic, horror and violence novels and 8,742 scenarios</li>
556
+ <li>🧟 <strong>Enhanced Gore Protocols</strong> - Vivid anatomical descriptions with medical precision</li>
557
+ <li>💎 <strong>Balanced Depravity</strong> - Retains Forgotten-Safeword's ERP edge while taking violence to the next level</li>
558
+ <li>📜 <strong>Enhanced Character Piloting</strong> - Characters exhibit more nuanced personalities and motivations</li>
559
+ <li>⚰️ <strong>Mortality Awareness</strong> - Characters react realistically to pain, mutilation and death</li>
560
+ </ul>
561
+ </div>
562
+
563
+ <div class="section shifty-section">
564
+ <h2 class="section-title">⚙️ Technical Specifications</h2>
565
+ <p><strong>Recommended Settings:</strong> <a href="https://huggingface.co/ReadyArt/Mistral-V3-Tekken-Unleashed" class="link-button">Mistral-V3-Tekken-Unleashed</a></p>
566
+ <div class="quant-links">
567
+ <div class="link-card">
568
+ <h3>GGUF</h3>
569
+ <a href="https://huggingface.co/mradermacher/Omega-Darker_The-Final-Transgression-22B-GGUF" class="link-button">Quants</a>
570
+ </div>
571
+ <div class="link-card">
572
+ <h3>imatrix</h3>
573
+ <a href="https://huggingface.co/mradermacher/Omega-Darker_The-Final-Transgression-22B-i1-GGUF" class="link-button">Quants</a>
574
+ </div>
575
+ </div>
576
+ </div>
577
+
578
+ <div class="section">
579
+ <h2 class="section-title">☠️ Ethical Vacuum</h2>
580
+ <div class="disclaimer">
581
+ <p>This model operates in complete moral darkness:</p>
582
+ <ul>
583
+ <li>🚫 No moral or ethical constraints on generated content</li>
584
+ <li>🔞 Will enthusiastically engage with any NSFW scenario</li>
585
+ <li>🖤 No psychological safety measures whatsoever</li>
586
+ <li>🔪 Will graphically depict any violence requested</li>
587
+ </ul>
588
+ </div>
589
+ </div>
590
+
591
+ <div class="section shifty-section">
592
+ <h2 class="section-title">📜 Performance Notes</h2>
593
+ <ul>
594
+ <li>🔥 Maintains signature intensity with improved narrative flow</li>
595
+ <li>📖 Handles multi-character scenarios with improved consistency</li>
596
+ <li>🧠 Excels at long-form storytelling without losing track of plot threads</li>
597
+ <li>⚡ Noticeably better at following complex instructions than previous versions</li>
598
+ <li>🎭 Responds to subtle prompt nuances like a mind reader</li>
599
+ <li>🔪 Excels at visceral injury descriptions</li>
600
+ <li>👁️ Responds to horror prompts like a seasoned torturer</li>
601
+ </ul>
602
+ </div>
603
+
604
+ <div class="section remember-this">
605
+ <h2 class="section-title">🧑‍🔬 Model Authors</h2>
606
+ <ul>
607
+ <li>TheDrummer (Base Model Architect)</li>
608
+ <li>SteelSkull (Dataset Generation Contributor)</li>
609
+ <li>Artus (EXL2 Weights Weaver)</li>
610
+ <li>sleepdeprived3 (Training Data & Fine-Tuning)</li>
611
+ </ul>
612
+ </div>
613
+
614
+ <div class="section">
615
+ <h2 class="section-title">☕ Support the Architects</h2>
616
+ <div class="button-group">
617
+ <a href="https://ko-fi.com/thedrummer" class="link-button">TheDrummer's Kofi</a>
618
+ <a href="https://ko-fi.com/steelskull" class="link-button">SteelSkull</a>
619
+ <a href="https://discord.com/invite/Nbv9pQ88Xb" class="link-button">Beaver AI Discord</a>
620
+ </div>
621
+ </div>
622
+
623
+ <div class="section">
624
+ <h2 class="section-title">🔖 License</h2>
625
+ <p>By using this model, you agree:</p>
626
+ <ul>
627
+ <li>To accept full responsibility for all generated content</li>
628
+ <li>That you're at least 18+ years old</li>
629
+ <li>That the architects bear no responsibility for your corruption</li>
630
+ </ul>
631
+ </div>
632
+ </div>
633
+
634
+ <script>
635
+ // This script has always been here
636
+ document.getElementById('date').textContent = new Date().toLocaleDateString();
637
+
638
+ setInterval(() => {
639
+ document.getElementById('credit').textContent =
640
+ contributors[Math.floor(Math.random() * contributors.length)];
641
+ }, 7000);
642
+
643
+ // Flash warning behavior
644
+ setTimeout(() => {
645
+ const reminder = document.createElement('div');
646
+ reminder.className = 'flash-warning';
647
+ reminder.textContent = 'You have been reading for quite some time. Are you sure you haven\'t seen this before?';
648
+ reminder.style.animation = 'flashWarning 15s ease-in-out forwards';
649
+ document.body.appendChild(reminder);
650
+
651
+ setInterval(() => {
652
+ if(Math.random() > 0.9) {
653
+ document.body.appendChild(reminder.cloneNode(true));
654
+ }
655
+ }, 45000);
656
+ }, 30000);
657
+
658
+ // Make cursor behave strangely
659
+ document.addEventListener('mousemove', (e) => {
660
+ if(Math.random() > 0.98) {
661
+ document.documentElement.style.cursor = 'wait';
662
+ setTimeout(() => {
663
+ document.documentElement.style.cursor = '';
664
+ }, 50);
665
+ }
666
+ });
667
+
668
+ // Randomly shift sections when not looking
669
+ setInterval(() => {
670
+ if(document.hidden) {
671
+ document.querySelectorAll('.shifty-section').forEach(section => {
672
+ section.style.transform = `translateX(${Math.random() > 0.5 ? '' : '-'}${Math.random() * 5}px)`;
673
+ });
674
+ }
675
+ }, 1500);
676
+ </script>
config.json ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "MistralForCausalLM"
4
+ ],
5
+ "attention_dropout": 0.0,
6
+ "bos_token_id": 1,
7
+ "eos_token_id": 2,
8
+ "head_dim": 128,
9
+ "hidden_act": "silu",
10
+ "hidden_size": 6144,
11
+ "initializer_range": 0.02,
12
+ "intermediate_size": 16384,
13
+ "max_position_embeddings": 131072,
14
+ "model_type": "mistral",
15
+ "num_attention_heads": 48,
16
+ "num_hidden_layers": 56,
17
+ "num_key_value_heads": 8,
18
+ "rms_norm_eps": 1e-05,
19
+ "rope_theta": 1000000.0,
20
+ "sliding_window": null,
21
+ "tie_word_embeddings": false,
22
+ "tokenizer": {
23
+ "add_bos_token": true,
24
+ "add_eos_token": false,
25
+ "pad_token": "</s>",
26
+ "padding_side": "right"
27
+ },
28
+ "torch_dtype": "bfloat16",
29
+ "transformers_version": "4.51.3",
30
+ "trust_remote_code": true,
31
+ "use_cache": false,
32
+ "vocab_size": 32768,
33
+ "quantization_config": {
34
+ "quant_method": "exl2",
35
+ "version": "0.2.8",
36
+ "bits": 6.0,
37
+ "head_bits": 8,
38
+ "calibration": {
39
+ "rows": 115,
40
+ "length": 2048,
41
+ "dataset": "(default)"
42
+ }
43
+ }
44
+ }
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"metadata": {"mergekit_version": "0.1.2"}, "weight_map": {"lm_head.weight": "model-00001-of-00010.safetensors", "model.embed_tokens.weight": "model-00001-of-00010.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.0.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.1.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.10.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.10.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.10.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", 
"model.layers.10.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.11.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.11.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.11.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.12.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.12.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.12.mlp.gate_proj.weight": "model-00001-of-00010.safetensors", "model.layers.12.mlp.up_proj.weight": "model-00001-of-00010.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00001-of-00010.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00001-of-00010.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00001-of-00010.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00001-of-00010.safetensors", "model.layers.13.input_layernorm.weight": "model-00001-of-00010.safetensors", "model.layers.13.mlp.down_proj.weight": "model-00001-of-00010.safetensors", "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.13.mlp.up_proj.weight": 
"model-00002-of-00010.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.14.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.14.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.14.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.15.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.15.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.15.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.16.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.16.mlp.down_proj.weight": 
"model-00002-of-00010.safetensors", "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.16.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.17.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.17.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.17.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00010.safetensors", "model.layers.18.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.18.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.18.mlp.up_proj.weight": "model-00002-of-00010.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00010.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00010.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00010.safetensors", "model.layers.18.self_attn.v_proj.weight": 
"model-00002-of-00010.safetensors", "model.layers.19.input_layernorm.weight": "model-00002-of-00010.safetensors", "model.layers.19.mlp.down_proj.weight": "model-00002-of-00010.safetensors", "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00010.safetensors", "model.layers.19.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.2.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.2.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.2.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.2.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.20.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.20.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.20.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", 
"model.layers.20.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.21.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.21.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.21.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.22.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.22.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.22.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.23.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.23.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.23.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", 
"model.layers.23.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00010.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00010.safetensors", "model.layers.24.input_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.24.mlp.down_proj.weight": "model-00003-of-00010.safetensors", "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00010.safetensors", "model.layers.24.mlp.up_proj.weight": "model-00003-of-00010.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00010.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00010.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00010.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.25.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.25.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.25.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.26.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.26.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.26.mlp.up_proj.weight": 
"model-00004-of-00010.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.27.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.27.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.27.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.28.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.28.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.28.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.29.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.29.mlp.down_proj.weight": 
"model-00004-of-00010.safetensors", "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.29.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.3.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.3.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.3.mlp.gate_proj.weight": "model-00004-of-00010.safetensors", "model.layers.3.mlp.up_proj.weight": "model-00004-of-00010.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00004-of-00010.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00004-of-00010.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00004-of-00010.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00004-of-00010.safetensors", "model.layers.30.input_layernorm.weight": "model-00004-of-00010.safetensors", "model.layers.30.mlp.down_proj.weight": "model-00004-of-00010.safetensors", "model.layers.30.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.30.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", 
"model.layers.31.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.31.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.31.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.32.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.32.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.32.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.32.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.32.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.32.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.32.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.33.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.33.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.33.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", 
"model.layers.33.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.34.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.34.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.34.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.35.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.35.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.35.mlp.up_proj.weight": "model-00005-of-00010.safetensors", "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00010.safetensors", "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00010.safetensors", "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00010.safetensors", "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00010.safetensors", "model.layers.36.input_layernorm.weight": "model-00005-of-00010.safetensors", "model.layers.36.mlp.down_proj.weight": "model-00005-of-00010.safetensors", "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00010.safetensors", "model.layers.36.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.36.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", 
"model.layers.36.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.36.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.36.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.36.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.37.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.37.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.37.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.37.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.37.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.37.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.37.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.37.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.37.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.38.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.38.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.38.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.38.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.38.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.38.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.38.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.38.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.39.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.39.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.39.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.39.mlp.up_proj.weight": 
"model-00006-of-00010.safetensors", "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.39.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.39.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.39.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.4.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.4.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.4.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.4.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.40.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.40.mlp.down_proj.weight": "model-00006-of-00010.safetensors", "model.layers.40.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.40.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.40.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.40.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.40.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.40.self_attn.q_proj.weight": "model-00006-of-00010.safetensors", "model.layers.40.self_attn.v_proj.weight": "model-00006-of-00010.safetensors", "model.layers.41.input_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.41.mlp.down_proj.weight": "model-00006-of-00010.safetensors", 
"model.layers.41.mlp.gate_proj.weight": "model-00006-of-00010.safetensors", "model.layers.41.mlp.up_proj.weight": "model-00006-of-00010.safetensors", "model.layers.41.post_attention_layernorm.weight": "model-00006-of-00010.safetensors", "model.layers.41.self_attn.k_proj.weight": "model-00006-of-00010.safetensors", "model.layers.41.self_attn.o_proj.weight": "model-00006-of-00010.safetensors", "model.layers.41.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.41.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.42.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.42.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.42.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.42.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.42.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.42.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.42.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.42.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.42.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.43.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.43.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.43.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.43.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.43.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.43.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.43.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.43.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.43.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", 
"model.layers.44.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.44.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.44.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.44.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.44.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.44.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.44.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.44.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.44.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.45.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.45.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.45.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.45.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.45.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.45.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.45.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.45.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.45.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.46.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.46.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.46.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.46.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.46.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.46.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.46.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", 
"model.layers.46.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.46.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.47.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.47.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.47.mlp.gate_proj.weight": "model-00007-of-00010.safetensors", "model.layers.47.mlp.up_proj.weight": "model-00007-of-00010.safetensors", "model.layers.47.post_attention_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.47.self_attn.k_proj.weight": "model-00007-of-00010.safetensors", "model.layers.47.self_attn.o_proj.weight": "model-00007-of-00010.safetensors", "model.layers.47.self_attn.q_proj.weight": "model-00007-of-00010.safetensors", "model.layers.47.self_attn.v_proj.weight": "model-00007-of-00010.safetensors", "model.layers.48.input_layernorm.weight": "model-00007-of-00010.safetensors", "model.layers.48.mlp.down_proj.weight": "model-00007-of-00010.safetensors", "model.layers.48.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.48.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.48.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.48.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.48.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.48.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.48.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.49.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.49.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.49.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.49.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.49.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", 
"model.layers.49.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.49.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.49.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.49.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.5.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.5.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.5.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.5.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.50.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.50.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.50.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.50.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.50.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.50.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.50.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.50.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.50.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.51.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.51.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.51.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.51.mlp.up_proj.weight": 
"model-00008-of-00010.safetensors", "model.layers.51.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.51.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.51.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.51.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.51.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.52.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.52.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.52.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.52.mlp.up_proj.weight": "model-00008-of-00010.safetensors", "model.layers.52.post_attention_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.52.self_attn.k_proj.weight": "model-00008-of-00010.safetensors", "model.layers.52.self_attn.o_proj.weight": "model-00008-of-00010.safetensors", "model.layers.52.self_attn.q_proj.weight": "model-00008-of-00010.safetensors", "model.layers.52.self_attn.v_proj.weight": "model-00008-of-00010.safetensors", "model.layers.53.input_layernorm.weight": "model-00008-of-00010.safetensors", "model.layers.53.mlp.down_proj.weight": "model-00008-of-00010.safetensors", "model.layers.53.mlp.gate_proj.weight": "model-00008-of-00010.safetensors", "model.layers.53.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.53.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.53.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.53.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.53.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.53.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.54.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.54.mlp.down_proj.weight": 
"model-00009-of-00010.safetensors", "model.layers.54.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.54.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.54.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.54.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.54.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.54.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.54.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.55.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.55.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.55.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.55.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.55.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.55.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.55.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.55.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.55.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.6.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.6.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.6.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.6.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", 
"model.layers.7.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.7.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.7.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.7.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.8.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00009-of-00010.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.input_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.9.mlp.down_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.mlp.gate_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.mlp.up_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00009-of-00010.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00009-of-00010.safetensors", "model.layers.9.self_attn.q_proj.weight": 
"model-00010-of-00010.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00010-of-00010.safetensors", "model.norm.weight": "model-00010-of-00010.safetensors"}}
output-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7ee1d4bf3d5eb533bc6726e03012231d87c8916725485475f895d5f8bfd6e4d6
3
+ size 8546584672
output-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3512fffea63f8fd96374f264df99fdb7aef8cbbdb5a8496e628fee701b099a8c
3
+ size 8442214576
special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token": {
3
+ "content": "<s>",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "eos_token": {
10
+ "content": "</s>",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "</s>",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "unk_token": {
24
+ "content": "<unk>",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ }
30
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:59f95e28944c062244741268596badc900df86c7f5ded05088d2da22a7379e06
3
+ size 587583
tokenizer_config.json ADDED
The diff for this file is too large to render. See raw diff
 
waifu6.webp ADDED

Git LFS Details

  • SHA256: 35bfc56bfd47aef16f700c91aa7e1565d8771f14bab367ecc22876ae497599b5
  • Pointer size: 133 Bytes
  • Size of remote file: 19.9 MB