sandmanbuzz committed
Commit 0302fdd · verified · 1 parent: e027923

crest-e2-clash-e2-faint

README.md ADDED
@@ -0,0 +1,125 @@
+ ---
+ base_model:
+ - mistralai/Mixtral-8x7B-v0.1
+ library_name: transformers
+ tags:
+ - mergekit
+ - merge
+
+ ---
+ # uphill-instruct-crest-e2-clash-e2-lime-faint-try1
+
+ This is a merge of pre-trained language models created using [mergekit](https://github.com/cg123/mergekit).
+
+ ## Merge Details
+ ### Merge Method
+
+ This model was merged using the [DARE TIES](https://arxiv.org/abs/2311.03099) merge method, using [mistralai/Mixtral-8x7B-v0.1](https://huggingface.co/mistralai/Mixtral-8x7B-v0.1) as the base.
+
+ ### Models Merged
+
+ The following models were included in the merge:
+ * ./Mixtral-8x7B-Yes-Instruct-LimaRP
+ * ./uphill-instruct-crest-e2-nolime
+ * ./uphill-pure-clash-0.2-e2
+
+ ### Configuration
+
+ The following YAML configuration was used to produce this model:
+
+ ```yaml
+ # Faint technique, crest-e2 clash-e2
+ #
+ # review:
+ # - Instruction-following:
+ # - Swerve:
+ # - Word choice:
+ # - Rhythm, cadence:
+ # - Notes:
+ #   -
+ #
+ # - Design:
+ #   The idea here is to cut crush -- formerly the very cornerstone
+ #   of our merges -- completely out. It's very good for word choice,
+ #   but crest is, too. The only problem is I seem to remember that
+ #   crest is overfit. So, we make it faint.
+ #
+ # Note: nearly two years later I'm trying to bring Mixtral
+ # back from the dead. There are multiple reasons:
+ #   1. Mistral-Small is kind of crap and smells like slop.
+ #      Hell, even the comprehension felt weak, but maybe that's
+ #      just how I tried to sample it.
+ #   2. Llama3 hasn't been interesting and is definitely crammed
+ #      with slop.
+ #   3. Mixtral is probably the least synthetic-trained-sounding
+ #      of all the OG models. Even when I tried the Qwen shit
+ #      it seemed to be just OpenAI. Mixtral is still sloppy.
+ #
+ # So, the pieces that are ours are uphill: a non-instruct lora
+ # being applied to the instruct rawdog, without an intermediate
+ # step.
+ #
+ # Obviously we're using pure elemental antisoc loras, hush's shit,
+ # but not her merge, because the merges aren't "uphill", as in,
+ # a lora made with "mixtral non-instruct" applied straight to
+ # the instruct with loraize.
+ #
+ # The notion, which came to me in the middle of the night, is
+ # to have the hush loras be only barely present layer-wise but
+ # weighted heavily. Likewise with LimaRP: send doctor-shotgun's
+ # qlora uphill, straight into mixtral-instruct.
+ #
+ # My hypothesis is that we should get really fucking close to
+ # pure-ass mixtral-instruct in terms of attention, but that
+ # we're weighting really hard not to write like it. I have no
+ # idea if that's how it works -- I'm a fucking caveman.
+ #
+ # What I'm given to understand, and I'm way out of my depth,
+ # is that the antisoc layers won't have blotched the instruct
+ # as badly as they usually do, but when they're triggered they
+ # are dominant. It's entirely possible I've got no idea what
+ # I'm saying.
+
+ # Model descriptions:
+ # - crush: poetry; we have all checkpoints
+ # - crest: fic; we only have e2 for this
+ # - clash: novels (I think); we have all checkpoints for 0.2
+ models:
+   # I wonder what happens if we just hurl this out the window
+   # - model: mistralai/Mixtral-8x7B-Instruct-v0.1
+   #   parameters:
+   #     density: 0.9
+   #     weight: 0.55
+   #
+   # crest is fic
+   - model: ./uphill-instruct-crest-e2-nolime
+     # I found lima in this, I need to cook another
+     parameters:
+       density: 0.4
+       weight: 0.3
+   # This is actually an uphill lima but I didn't name it that way.
+   - model: ./Mixtral-8x7B-Yes-Instruct-LimaRP
+     parameters:
+       # Still just a breath of layers from the thing
+       density: 0.2
+       # I am gimping its weight compared to hush tunes because limarp has too
+       # much ai-slop and amateur-smut cliche slop. Honestly, if there were
+       # something better than limarp I'd try to train it myself, but I don't
+       # know if there is.
+       weight: 0.1
+   # Pure uphill clash at e2. Also more weight.
+   - model: ./uphill-pure-clash-0.2-e2
+     parameters:
+       density: 0.5
+       weight: 0.6
+ # della sucked ass, so dare_ties it is
+ merge_method: dare_ties
+ # I know all of these look like instruct, but the lora
+ # is actually not, so we go to the base base.
+ base_model: mistralai/Mixtral-8x7B-v0.1
+ parameters:
+   normalize: true
+   int8_mask: true
+ dtype: bfloat16
+
+ ```
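
The card ends at the configuration, so a minimal loading sketch follows. It is not from the original README: the hub id is assumed from the card title, and the Mixtral-Instruct `[INST]` prompt format is assumed because the merge is built from instruct-based components; adjust both as needed.

```python
# Minimal loading sketch (not part of the original card).
# The repo id below is an assumption based on the card title; substitute
# the real hub id or a local path to the merged weights.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "sandmanbuzz/uphill-instruct-crest-e2-clash-e2-lime-faint-try1"  # assumed

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    torch_dtype=torch.bfloat16,  # matches `dtype: bfloat16` in the merge config
    device_map="auto",           # ~93 GB of bf16 weights, so shard/offload as needed
)

# Mixtral-Instruct style prompt; assumed to be what this merge expects.
prompt = "[INST] Write the opening paragraph of a short story set in a lighthouse. [/INST]"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
output = model.generate(**inputs, max_new_tokens=200, do_sample=True, temperature=0.8)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```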
config.json ADDED
@@ -0,0 +1,31 @@
+ {
+   "architectures": [
+     "MixtralForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "head_dim": null,
+   "hidden_act": "silu",
+   "hidden_size": 4096,
+   "initializer_range": 0.02,
+   "intermediate_size": 14336,
+   "max_position_embeddings": 32768,
+   "model_type": "mixtral",
+   "num_attention_heads": 32,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 8,
+   "num_local_experts": 8,
+   "output_router_logits": false,
+   "rms_norm_eps": 1e-05,
+   "rope_theta": 1000000.0,
+   "router_aux_loss_coef": 0.02,
+   "router_jitter_noise": 0.0,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.52.4",
+   "use_cache": true,
+   "vocab_size": 32000
+ }
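
The config is stock Mixtral-8x7B. It is worth spelling out what those numbers imply, because the 2-of-8 expert routing (`num_experts_per_tok: 2` out of `num_local_experts: 8`) is why per-token compute is far smaller than the roughly 93 GB of bf16 weights in the shards below would suggest. A back-of-the-envelope sketch in plain Python, using only values from the config above (router gates and norm weights are ignored as negligible):

```python
# Back-of-the-envelope parameter count from the config values above.
# Approximation only: norm weights and the tiny router gates are ignored.
hidden = 4096              # hidden_size
inter = 14336              # intermediate_size
layers = 32                # num_hidden_layers
heads = 32                 # num_attention_heads
kv_heads = 8               # num_key_value_heads
head_dim = hidden // heads # 128 (head_dim is null in the config, so derived)
experts = 8                # num_local_experts
active = 2                 # num_experts_per_tok
vocab = 32000              # vocab_size

attn = hidden * (heads * head_dim)           # q_proj
attn += 2 * hidden * (kv_heads * head_dim)   # k_proj + v_proj (grouped-query)
attn += (heads * head_dim) * hidden          # o_proj

expert = 3 * hidden * inter                  # w1, w2, w3 per expert
embeddings = 2 * vocab * hidden              # embed_tokens + lm_head (untied)

total = layers * (attn + experts * expert) + embeddings
active_params = layers * (attn + active * expert) + embeddings

print(f"total  ≈ {total / 1e9:.1f}B parameters")          # ~46.7B
print(f"active ≈ {active_params / 1e9:.1f}B per token")   # ~12.9B
```

That is the familiar Mixtral arithmetic: about 46.7B parameters on disk, about 12.9B active for any given token.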
mergekit_config.yml ADDED
@@ -0,0 +1,93 @@
+ # Faint technique, crest-e2 clash-e2
+ #
+ # review:
+ # - Instruction-following:
+ # - Swerve:
+ # - Word choice:
+ # - Rhythm, cadence:
+ # - Notes:
+ #   -
+ #
+ # - Design:
+ #   The idea here is to cut crush -- formerly the very cornerstone
+ #   of our merges -- completely out. It's very good for word choice,
+ #   but crest is, too. The only problem is I seem to remember that
+ #   crest is overfit. So, we make it faint.
+ #
+ # Note: nearly two years later I'm trying to bring Mixtral
+ # back from the dead. There are multiple reasons:
+ #   1. Mistral-Small is kind of crap and smells like slop.
+ #      Hell, even the comprehension felt weak, but maybe that's
+ #      just how I tried to sample it.
+ #   2. Llama3 hasn't been interesting and is definitely crammed
+ #      with slop.
+ #   3. Mixtral is probably the least synthetic-trained-sounding
+ #      of all the OG models. Even when I tried the Qwen shit
+ #      it seemed to be just OpenAI. Mixtral is still sloppy.
+ #
+ # So, the pieces that are ours are uphill: a non-instruct lora
+ # being applied to the instruct rawdog, without an intermediate
+ # step.
+ #
+ # Obviously we're using pure elemental antisoc loras, hush's shit,
+ # but not her merge, because the merges aren't "uphill", as in,
+ # a lora made with "mixtral non-instruct" applied straight to
+ # the instruct with loraize.
+ #
+ # The notion, which came to me in the middle of the night, is
+ # to have the hush loras be only barely present layer-wise but
+ # weighted heavily. Likewise with LimaRP: send doctor-shotgun's
+ # qlora uphill, straight into mixtral-instruct.
+ #
+ # My hypothesis is that we should get really fucking close to
+ # pure-ass mixtral-instruct in terms of attention, but that
+ # we're weighting really hard not to write like it. I have no
+ # idea if that's how it works -- I'm a fucking caveman.
+ #
+ # What I'm given to understand, and I'm way out of my depth,
+ # is that the antisoc layers won't have blotched the instruct
+ # as badly as they usually do, but when they're triggered they
+ # are dominant. It's entirely possible I've got no idea what
+ # I'm saying.
+
+ # Model descriptions:
+ # - crush: poetry; we have all checkpoints
+ # - crest: fic; we only have e2 for this
+ # - clash: novels (I think); we have all checkpoints for 0.2
+ models:
+   # I wonder what happens if we just hurl this out the window
+   # - model: mistralai/Mixtral-8x7B-Instruct-v0.1
+   #   parameters:
+   #     density: 0.9
+   #     weight: 0.55
+   #
+   # crest is fic
+   - model: ./uphill-instruct-crest-e2-nolime
+     # I found lima in this, I need to cook another
+     parameters:
+       density: 0.4
+       weight: 0.3
+   # This is actually an uphill lima but I didn't name it that way.
+   - model: ./Mixtral-8x7B-Yes-Instruct-LimaRP
+     parameters:
+       # Still just a breath of layers from the thing
+       density: 0.2
+       # I am gimping its weight compared to hush tunes because limarp has too
+       # much ai-slop and amateur-smut cliche slop. Honestly, if there were
+       # something better than limarp I'd try to train it myself, but I don't
+       # know if there is.
+       weight: 0.1
+   # Pure uphill clash at e2. Also more weight.
+   - model: ./uphill-pure-clash-0.2-e2
+     parameters:
+       density: 0.5
+       weight: 0.6
+ # della sucked ass, so dare_ties it is
+ merge_method: dare_ties
+ # I know all of these look like instruct, but the lora
+ # is actually not, so we go to the base base.
+ base_model: mistralai/Mixtral-8x7B-v0.1
+ parameters:
+   normalize: true
+   int8_mask: true
+ dtype: bfloat16
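
`mergekit_config.yml` is the same configuration embedded in the README, written out by mergekit alongside the weights; feeding it back through mergekit's `mergekit-yaml` entry point (roughly `mergekit-yaml mergekit_config.yml ./output-dir`) should reproduce the merge, given the three local fine-tunes. As a quick sanity check of the density/weight knobs discussed in the comments, a small sketch using PyYAML; the file name and printout are illustrative, not part of the repo:

```python
# Parse the merge config and summarize the per-model knobs.
# Assumes PyYAML is installed and the config is saved as mergekit_config.yml.
import yaml

with open("mergekit_config.yml", "r", encoding="utf-8") as fp:
    cfg = yaml.safe_load(fp)

print(f"method={cfg['merge_method']}  base={cfg['base_model']}  dtype={cfg['dtype']}")

# `normalize: true` rescales the weights to sum to 1; here they already do (0.3 + 0.1 + 0.6).
total = sum(m["parameters"]["weight"] for m in cfg["models"])
for m in cfg["models"]:
    p = m["parameters"]
    print(f"{m['model']:45}  density={p['density']:.1f}  "
          f"weight={p['weight']:.1f}  normalized={p['weight'] / total:.2f}")
```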
model-00001-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ea9d82d2219b120cb0df285cb3440d4d0a5c911a9c34a9fa99bbb05333e7bb2
+ size 4953560832
model-00002-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0cfa68d54d4d9d4de6f5aeb5d9603e9e1f7d6384f868c52f0a84ea72b9160834
+ size 4983004056
model-00003-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c15cb733a337efcd9950ffde25ef63f1e5aaacefd70e0fa000518a3025025c15
+ size 4899035248
model-00004-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9eadc77a9d5ec8ad267b0595b8f87813380babbddaafcd035d1fcd596fb0be38
+ size 4983004064
model-00005-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b38ee1d327083fefdc7d3f52c0975e24af70ea8123c469d3e0c0ecb3f1157e1f
+ size 4983004064
model-00006-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:945cc75fa63d46f2a155784f51c21801196ac502bfe99e7ff8a212a008eced49
+ size 4983004072
model-00007-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8eb056a4ec02291992a48ef586a23a4afd849f8bf39de0e772115b3af54de111
+ size 4899035248
model-00008-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d19db830e089a6854d200ee990d391f435b417d7c8d1c8db7741b956e2dfeca
+ size 4983004032
model-00009-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1eb85a8a22f8ff368d8ffbac022bc4919aec22db6dbb1f5cfa7d4fe28730c4c4
+ size 4983004072
model-00010-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6a1e00e39337584f909b9fc6a311645c1df16246b98c13a384c77bc3b9073c87
+ size 4899035248
model-00011-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91b62bf4352866d3c7a1c68cc7c68eab98d6eb0bf4aed367d0b033ddfec8e0de
+ size 4983004064
model-00012-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f4eb0b2c1f51065d1d3fca35887aa43cafcdf28839067e8a75d53b232b570f58
+ size 4983004072
model-00013-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9eeff2e0fb36ad9344d02e2ea6d29daf0f482e421c95c8cbb4ada5d29bf8ea6e
+ size 4983004072
model-00014-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93e62b315c5140e79efefd2e1522fb4da8a2b34ffe2d7a66db82dda140ef6667
+ size 4899035232
model-00015-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8c8d53bb79dfb28eb5a00a9f1ebeef2242f4c94d74b633220602c8bdce14cc80
+ size 4983004048
model-00016-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:399b37f76e15ea49c6e2cfd3b62453d9f9e63095328df38a01c1b9542acaee46
+ size 4983004040
model-00017-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a3b582dd3339ea1b371a459632b0f9f8993e10896be533bd80f237d758e03c9
+ size 4899035200
model-00018-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc7d022d3e3199e1bce49d24b5a02e182992cc769bf9d4a183f5fb8a49d48f05
+ size 4983004008
model-00019-of-00019.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e3bdadab10b89b9b4d3f46464a29568ea375f774ed66e9a803f05de79d1b4b8
+ size 4160927800
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"mergekit_version": "0.1.3"}, "weight_map": {"lm_head.weight": "model-00001-of-00019.safetensors", "model.embed_tokens.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.2.w1.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.4.w2.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.4.w3.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.5.w1.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.5.w2.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.5.w3.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.6.w1.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.6.w2.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.6.w3.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.7.w1.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.7.w2.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.experts.7.w3.weight": "model-00001-of-00019.safetensors", "model.layers.0.block_sparse_moe.gate.weight": "model-00001-of-00019.safetensors", "model.layers.0.input_layernorm.weight": "model-00001-of-00019.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00019.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00019.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00019.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00019.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.0.w1.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.0.w2.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.0.w3.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.1.w1.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.1.w2.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.1.w3.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.2.w1.weight": 
"model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.2.w2.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.2.w3.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.3.w1.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.3.w2.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.3.w3.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.4.w1.weight": "model-00001-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00019.safetensors", "model.layers.1.block_sparse_moe.gate.weight": "model-00002-of-00019.safetensors", "model.layers.1.input_layernorm.weight": "model-00002-of-00019.safetensors", "model.layers.1.post_attention_layernorm.weight": "model-00002-of-00019.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00019.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00002-of-00019.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00019.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.2.w1.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.2.w2.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.2.w3.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.3.w1.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.3.w2.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.3.w3.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.4.w1.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.4.w2.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.4.w3.weight": "model-00002-of-00019.safetensors", 
"model.layers.10.block_sparse_moe.experts.5.w1.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.5.w2.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.5.w3.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.6.w1.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.6.w2.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.6.w3.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.7.w1.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.7.w2.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.experts.7.w3.weight": "model-00002-of-00019.safetensors", "model.layers.10.block_sparse_moe.gate.weight": "model-00002-of-00019.safetensors", "model.layers.10.input_layernorm.weight": "model-00002-of-00019.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00019.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00019.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00019.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00019.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.0.w1.weight": "model-00002-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.0.w2.weight": "model-00002-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.0.w3.weight": "model-00002-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.1.w1.weight": "model-00002-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.1.w2.weight": "model-00002-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.1.w3.weight": "model-00002-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.2.w1.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.2.w2.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.2.w3.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.3.w1.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.3.w2.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.3.w3.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.4.w1.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.4.w2.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.4.w3.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.5.w1.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.5.w2.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.5.w3.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.6.w1.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.6.w2.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.6.w3.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.7.w1.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.experts.7.w2.weight": "model-00003-of-00019.safetensors", 
"model.layers.11.block_sparse_moe.experts.7.w3.weight": "model-00003-of-00019.safetensors", "model.layers.11.block_sparse_moe.gate.weight": "model-00003-of-00019.safetensors", "model.layers.11.input_layernorm.weight": "model-00003-of-00019.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00003-of-00019.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00003-of-00019.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00003-of-00019.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00003-of-00019.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.0.w1.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.0.w2.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.0.w3.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.1.w1.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.1.w2.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.1.w3.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.2.w1.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.2.w2.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.2.w3.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.3.w1.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.3.w2.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.3.w3.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.4.w1.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.4.w2.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.4.w3.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.5.w1.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.5.w2.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.5.w3.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.6.w1.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.6.w2.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.6.w3.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.7.w1.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.7.w2.weight": "model-00003-of-00019.safetensors", "model.layers.12.block_sparse_moe.experts.7.w3.weight": "model-00004-of-00019.safetensors", "model.layers.12.block_sparse_moe.gate.weight": "model-00004-of-00019.safetensors", "model.layers.12.input_layernorm.weight": "model-00004-of-00019.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00004-of-00019.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00004-of-00019.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00004-of-00019.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00004-of-00019.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.0.w1.weight": "model-00004-of-00019.safetensors", 
"model.layers.13.block_sparse_moe.experts.0.w2.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.1.w1.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.1.w2.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.2.w1.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.2.w2.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.2.w3.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.3.w1.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.3.w2.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.3.w3.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.4.w1.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.4.w2.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.4.w3.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.5.w1.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.5.w2.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.5.w3.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.6.w1.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.6.w2.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.6.w3.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.7.w1.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.7.w2.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.experts.7.w3.weight": "model-00004-of-00019.safetensors", "model.layers.13.block_sparse_moe.gate.weight": "model-00004-of-00019.safetensors", "model.layers.13.input_layernorm.weight": "model-00004-of-00019.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00004-of-00019.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00004-of-00019.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00004-of-00019.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00004-of-00019.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.0.w1.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.0.w2.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.0.w3.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.1.w1.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.1.w2.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.1.w3.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.2.w1.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.2.w2.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.2.w3.weight": "model-00004-of-00019.safetensors", 
"model.layers.14.block_sparse_moe.experts.3.w1.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.3.w2.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.3.w3.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.4.w1.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.4.w2.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.4.w3.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.5.w1.weight": "model-00004-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.5.w2.weight": "model-00005-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.5.w3.weight": "model-00005-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.6.w1.weight": "model-00005-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.6.w2.weight": "model-00005-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.6.w3.weight": "model-00005-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.7.w1.weight": "model-00005-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.7.w2.weight": "model-00005-of-00019.safetensors", "model.layers.14.block_sparse_moe.experts.7.w3.weight": "model-00005-of-00019.safetensors", "model.layers.14.block_sparse_moe.gate.weight": "model-00005-of-00019.safetensors", "model.layers.14.input_layernorm.weight": "model-00005-of-00019.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00005-of-00019.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00005-of-00019.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00005-of-00019.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00005-of-00019.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.0.w1.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.0.w2.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.0.w3.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.1.w1.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.1.w2.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.1.w3.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.2.w1.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.2.w2.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.2.w3.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.3.w1.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.3.w2.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.3.w3.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.4.w1.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.4.w2.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.4.w3.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.5.w1.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.5.w2.weight": "model-00005-of-00019.safetensors", 
"model.layers.15.block_sparse_moe.experts.5.w3.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.6.w1.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.6.w2.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.6.w3.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.7.w1.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.7.w2.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.experts.7.w3.weight": "model-00005-of-00019.safetensors", "model.layers.15.block_sparse_moe.gate.weight": "model-00005-of-00019.safetensors", "model.layers.15.input_layernorm.weight": "model-00005-of-00019.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00005-of-00019.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00005-of-00019.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00005-of-00019.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00005-of-00019.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.0.w1.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.0.w2.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.0.w3.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.1.w1.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.1.w2.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.1.w3.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.2.w1.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.2.w2.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.2.w3.weight": "model-00005-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.3.w1.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.3.w2.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.3.w3.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.4.w1.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.4.w2.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.4.w3.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.5.w1.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.5.w2.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.5.w3.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.6.w1.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.6.w2.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.6.w3.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.7.w1.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.7.w2.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.experts.7.w3.weight": "model-00006-of-00019.safetensors", "model.layers.16.block_sparse_moe.gate.weight": "model-00006-of-00019.safetensors", "model.layers.16.input_layernorm.weight": 
"model-00006-of-00019.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00006-of-00019.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00006-of-00019.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00006-of-00019.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00006-of-00019.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.0.w1.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.0.w3.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.1.w1.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.1.w2.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.1.w3.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.2.w1.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.2.w2.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.2.w3.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.3.w1.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.3.w2.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.3.w3.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.4.w1.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.4.w2.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.4.w3.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.5.w1.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.5.w2.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.5.w3.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.6.w1.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.6.w2.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.6.w3.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.7.w1.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.7.w2.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.experts.7.w3.weight": "model-00006-of-00019.safetensors", "model.layers.17.block_sparse_moe.gate.weight": "model-00006-of-00019.safetensors", "model.layers.17.input_layernorm.weight": "model-00006-of-00019.safetensors", "model.layers.17.post_attention_layernorm.weight": "model-00006-of-00019.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00006-of-00019.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00006-of-00019.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00006-of-00019.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00006-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.0.w1.weight": "model-00006-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.0.w2.weight": "model-00006-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.0.w3.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.1.w1.weight": 
"model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.1.w3.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.2.w1.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.2.w2.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.2.w3.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.3.w1.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.3.w2.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.3.w3.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.4.w1.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.4.w2.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.4.w3.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.5.w1.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.5.w2.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.5.w3.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.6.w1.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.6.w2.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.6.w3.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.7.w1.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.7.w2.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.experts.7.w3.weight": "model-00007-of-00019.safetensors", "model.layers.18.block_sparse_moe.gate.weight": "model-00007-of-00019.safetensors", "model.layers.18.input_layernorm.weight": "model-00007-of-00019.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00007-of-00019.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00007-of-00019.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00007-of-00019.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00007-of-00019.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.0.w1.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.0.w2.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.0.w3.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.1.w1.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.1.w2.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.1.w3.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.2.w1.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.2.w2.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.2.w3.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.3.w1.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.3.w2.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.3.w3.weight": "model-00007-of-00019.safetensors", 
"model.layers.19.block_sparse_moe.experts.4.w1.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.4.w2.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.4.w3.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.5.w1.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.5.w2.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.5.w3.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.6.w1.weight": "model-00007-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.6.w2.weight": "model-00008-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.6.w3.weight": "model-00008-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.7.w1.weight": "model-00008-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.7.w2.weight": "model-00008-of-00019.safetensors", "model.layers.19.block_sparse_moe.experts.7.w3.weight": "model-00008-of-00019.safetensors", "model.layers.19.block_sparse_moe.gate.weight": "model-00008-of-00019.safetensors", "model.layers.19.input_layernorm.weight": "model-00008-of-00019.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00008-of-00019.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00008-of-00019.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00008-of-00019.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00008-of-00019.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.0.w1.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.0.w3.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.1.w1.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.1.w3.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.2.w1.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.2.w2.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.2.w3.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.3.w1.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.3.w2.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.3.w3.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.4.w1.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.4.w2.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.4.w3.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.5.w1.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.5.w2.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.5.w3.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.6.w1.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.6.w2.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.6.w3.weight": 
"model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.7.w1.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.7.w2.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.experts.7.w3.weight": "model-00008-of-00019.safetensors", "model.layers.2.block_sparse_moe.gate.weight": "model-00008-of-00019.safetensors", "model.layers.2.input_layernorm.weight": "model-00008-of-00019.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00008-of-00019.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00008-of-00019.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00008-of-00019.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00008-of-00019.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.0.w1.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.0.w2.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.0.w3.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.1.w1.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.1.w2.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.1.w3.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.2.w1.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.2.w2.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.2.w3.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.3.w1.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.3.w2.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.3.w3.weight": "model-00008-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.4.w1.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.4.w2.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.4.w3.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.5.w1.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.5.w2.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.5.w3.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.6.w1.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.6.w2.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.6.w3.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.7.w1.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.7.w2.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.experts.7.w3.weight": "model-00009-of-00019.safetensors", "model.layers.20.block_sparse_moe.gate.weight": "model-00009-of-00019.safetensors", "model.layers.20.input_layernorm.weight": "model-00009-of-00019.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00009-of-00019.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00009-of-00019.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00009-of-00019.safetensors", "model.layers.20.self_attn.q_proj.weight": 
"model-00009-of-00019.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.0.w2.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.0.w3.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.1.w2.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.1.w3.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.2.w1.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.2.w2.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.2.w3.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.3.w1.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.3.w2.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.3.w3.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.4.w1.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.4.w2.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.4.w3.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.5.w1.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.5.w2.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.5.w3.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.6.w1.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.6.w2.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.6.w3.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.7.w1.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.7.w2.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.experts.7.w3.weight": "model-00009-of-00019.safetensors", "model.layers.21.block_sparse_moe.gate.weight": "model-00009-of-00019.safetensors", "model.layers.21.input_layernorm.weight": "model-00009-of-00019.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00009-of-00019.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00009-of-00019.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00009-of-00019.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00009-of-00019.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00009-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.0.w1.weight": "model-00009-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.0.w2.weight": "model-00009-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.0.w3.weight": "model-00009-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.1.w1.weight": "model-00009-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.1.w2.weight": "model-00009-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.1.w3.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.2.w1.weight": "model-00010-of-00019.safetensors", 
"model.layers.22.block_sparse_moe.experts.2.w2.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.2.w3.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.3.w1.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.3.w2.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.3.w3.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.4.w1.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.4.w2.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.4.w3.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.5.w1.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.5.w2.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.5.w3.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.6.w1.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.6.w2.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.6.w3.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.7.w1.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.7.w2.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.experts.7.w3.weight": "model-00010-of-00019.safetensors", "model.layers.22.block_sparse_moe.gate.weight": "model-00010-of-00019.safetensors", "model.layers.22.input_layernorm.weight": "model-00010-of-00019.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00010-of-00019.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00010-of-00019.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00010-of-00019.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00010-of-00019.safetensors", "model.layers.22.self_attn.v_proj.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.0.w1.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.0.w2.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.0.w3.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.1.w1.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.1.w2.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.1.w3.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.2.w1.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.2.w2.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.2.w3.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.3.w1.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.3.w2.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.3.w3.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.4.w1.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.4.w2.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.4.w3.weight": "model-00010-of-00019.safetensors", 
"model.layers.23.block_sparse_moe.experts.5.w1.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.5.w2.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.5.w3.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.6.w1.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.6.w2.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.6.w3.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.7.w1.weight": "model-00010-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.7.w2.weight": "model-00011-of-00019.safetensors", "model.layers.23.block_sparse_moe.experts.7.w3.weight": "model-00011-of-00019.safetensors", "model.layers.23.block_sparse_moe.gate.weight": "model-00011-of-00019.safetensors", "model.layers.23.input_layernorm.weight": "model-00011-of-00019.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00011-of-00019.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00011-of-00019.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00011-of-00019.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00011-of-00019.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.0.w2.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.0.w3.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.1.w2.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.1.w3.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.2.w1.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.2.w2.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.2.w3.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.3.w1.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.3.w2.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.3.w3.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.4.w1.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.4.w2.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.4.w3.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.5.w1.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.5.w2.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.5.w3.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.6.w1.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.6.w2.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.6.w3.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.7.w1.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.experts.7.w2.weight": "model-00011-of-00019.safetensors", 
"model.layers.24.block_sparse_moe.experts.7.w3.weight": "model-00011-of-00019.safetensors", "model.layers.24.block_sparse_moe.gate.weight": "model-00011-of-00019.safetensors", "model.layers.24.input_layernorm.weight": "model-00011-of-00019.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00011-of-00019.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00011-of-00019.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00011-of-00019.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00011-of-00019.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.0.w1.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.0.w2.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.0.w3.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.1.w1.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.1.w2.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.1.w3.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.2.w1.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.2.w2.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.2.w3.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.3.w1.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.3.w2.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.3.w3.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.4.w1.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.4.w2.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.4.w3.weight": "model-00011-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.5.w1.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.5.w2.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.5.w3.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.6.w1.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.6.w2.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.6.w3.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.7.w1.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.7.w2.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.experts.7.w3.weight": "model-00012-of-00019.safetensors", "model.layers.25.block_sparse_moe.gate.weight": "model-00012-of-00019.safetensors", "model.layers.25.input_layernorm.weight": "model-00012-of-00019.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00012-of-00019.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00012-of-00019.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00012-of-00019.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00012-of-00019.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.0.w1.weight": "model-00012-of-00019.safetensors", 
"model.layers.26.block_sparse_moe.experts.0.w2.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.0.w3.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.1.w1.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.1.w2.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.1.w3.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.2.w1.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.2.w2.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.2.w3.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.3.w1.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.3.w2.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.3.w3.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.4.w1.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.4.w2.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.4.w3.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.5.w1.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.5.w2.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.5.w3.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.6.w1.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.6.w2.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.6.w3.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.7.w1.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.7.w2.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.experts.7.w3.weight": "model-00012-of-00019.safetensors", "model.layers.26.block_sparse_moe.gate.weight": "model-00012-of-00019.safetensors", "model.layers.26.input_layernorm.weight": "model-00012-of-00019.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00012-of-00019.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00012-of-00019.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00012-of-00019.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00012-of-00019.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.0.w1.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.0.w2.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.0.w3.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.1.w1.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.1.w2.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.1.w3.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.2.w1.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.2.w2.weight": "model-00012-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.2.w3.weight": "model-00013-of-00019.safetensors", 
"model.layers.27.block_sparse_moe.experts.3.w1.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.3.w2.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.3.w3.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.4.w1.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.4.w2.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.4.w3.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.5.w1.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.5.w2.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.5.w3.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.6.w1.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.6.w2.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.6.w3.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.7.w1.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.7.w2.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.experts.7.w3.weight": "model-00013-of-00019.safetensors", "model.layers.27.block_sparse_moe.gate.weight": "model-00013-of-00019.safetensors", "model.layers.27.input_layernorm.weight": "model-00013-of-00019.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00013-of-00019.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00013-of-00019.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00013-of-00019.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00013-of-00019.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.0.w1.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.0.w2.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.0.w3.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.1.w1.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.1.w2.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.1.w3.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.2.w1.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.2.w2.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.2.w3.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.3.w1.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.3.w2.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.3.w3.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.4.w1.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.4.w2.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.4.w3.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.5.w1.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.5.w2.weight": "model-00013-of-00019.safetensors", 
"model.layers.28.block_sparse_moe.experts.5.w3.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.6.w1.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.6.w2.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.6.w3.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.7.w1.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.7.w2.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.experts.7.w3.weight": "model-00013-of-00019.safetensors", "model.layers.28.block_sparse_moe.gate.weight": "model-00013-of-00019.safetensors", "model.layers.28.input_layernorm.weight": "model-00013-of-00019.safetensors", "model.layers.28.post_attention_layernorm.weight": "model-00013-of-00019.safetensors", "model.layers.28.self_attn.k_proj.weight": "model-00013-of-00019.safetensors", "model.layers.28.self_attn.o_proj.weight": "model-00013-of-00019.safetensors", "model.layers.28.self_attn.q_proj.weight": "model-00013-of-00019.safetensors", "model.layers.28.self_attn.v_proj.weight": "model-00013-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.0.w1.weight": "model-00013-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.0.w2.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.0.w3.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.1.w1.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.1.w2.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.1.w3.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.2.w1.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.2.w2.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.2.w3.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.3.w1.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.3.w2.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.3.w3.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.4.w1.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.4.w2.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.4.w3.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.5.w1.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.5.w2.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.5.w3.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.6.w1.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.6.w2.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.6.w3.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.7.w1.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.7.w2.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.experts.7.w3.weight": "model-00014-of-00019.safetensors", "model.layers.29.block_sparse_moe.gate.weight": "model-00014-of-00019.safetensors", "model.layers.29.input_layernorm.weight": 
"model-00014-of-00019.safetensors", "model.layers.29.post_attention_layernorm.weight": "model-00014-of-00019.safetensors", "model.layers.29.self_attn.k_proj.weight": "model-00014-of-00019.safetensors", "model.layers.29.self_attn.o_proj.weight": "model-00014-of-00019.safetensors", "model.layers.29.self_attn.q_proj.weight": "model-00014-of-00019.safetensors", "model.layers.29.self_attn.v_proj.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.0.w1.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.0.w2.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.0.w3.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.1.w1.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.1.w2.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.1.w3.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.2.w1.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.2.w2.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.2.w3.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.3.w1.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.3.w2.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.3.w3.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.4.w1.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.4.w2.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.4.w3.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.5.w1.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.5.w2.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.5.w3.weight": "model-00014-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.6.w1.weight": "model-00015-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.6.w2.weight": "model-00015-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.6.w3.weight": "model-00015-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.7.w1.weight": "model-00015-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.7.w2.weight": "model-00015-of-00019.safetensors", "model.layers.3.block_sparse_moe.experts.7.w3.weight": "model-00015-of-00019.safetensors", "model.layers.3.block_sparse_moe.gate.weight": "model-00015-of-00019.safetensors", "model.layers.3.input_layernorm.weight": "model-00015-of-00019.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00015-of-00019.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00015-of-00019.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00015-of-00019.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00015-of-00019.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.0.w1.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.0.w2.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.0.w3.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.1.w1.weight": "model-00015-of-00019.safetensors", 
"model.layers.30.block_sparse_moe.experts.1.w2.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.1.w3.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.2.w1.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.2.w2.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.2.w3.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.3.w1.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.3.w2.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.3.w3.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.4.w1.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.4.w2.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.4.w3.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.5.w1.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.5.w2.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.5.w3.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.6.w1.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.6.w2.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.6.w3.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.7.w1.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.7.w2.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.experts.7.w3.weight": "model-00015-of-00019.safetensors", "model.layers.30.block_sparse_moe.gate.weight": "model-00015-of-00019.safetensors", "model.layers.30.input_layernorm.weight": "model-00015-of-00019.safetensors", "model.layers.30.post_attention_layernorm.weight": "model-00015-of-00019.safetensors", "model.layers.30.self_attn.k_proj.weight": "model-00015-of-00019.safetensors", "model.layers.30.self_attn.o_proj.weight": "model-00015-of-00019.safetensors", "model.layers.30.self_attn.q_proj.weight": "model-00015-of-00019.safetensors", "model.layers.30.self_attn.v_proj.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.0.w1.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.0.w2.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.0.w3.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.1.w1.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.1.w2.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.1.w3.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.2.w1.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.2.w2.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.2.w3.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.3.w1.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.3.w2.weight": "model-00015-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.3.w3.weight": "model-00016-of-00019.safetensors", 
"model.layers.31.block_sparse_moe.experts.4.w1.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.4.w2.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.4.w3.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.5.w1.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.5.w2.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.5.w3.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.6.w1.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.6.w2.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.6.w3.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.7.w1.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.7.w2.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.experts.7.w3.weight": "model-00016-of-00019.safetensors", "model.layers.31.block_sparse_moe.gate.weight": "model-00016-of-00019.safetensors", "model.layers.31.input_layernorm.weight": "model-00016-of-00019.safetensors", "model.layers.31.post_attention_layernorm.weight": "model-00016-of-00019.safetensors", "model.layers.31.self_attn.k_proj.weight": "model-00016-of-00019.safetensors", "model.layers.31.self_attn.o_proj.weight": "model-00016-of-00019.safetensors", "model.layers.31.self_attn.q_proj.weight": "model-00016-of-00019.safetensors", "model.layers.31.self_attn.v_proj.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.0.w1.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.0.w2.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.0.w3.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.1.w1.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.1.w2.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.1.w3.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.2.w1.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.2.w2.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.2.w3.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.3.w1.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.3.w2.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.3.w3.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.4.w1.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.4.w2.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.4.w3.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.5.w1.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.5.w2.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.5.w3.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.6.w1.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.6.w2.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.6.w3.weight": 
"model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.7.w1.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.7.w2.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.experts.7.w3.weight": "model-00016-of-00019.safetensors", "model.layers.4.block_sparse_moe.gate.weight": "model-00016-of-00019.safetensors", "model.layers.4.input_layernorm.weight": "model-00016-of-00019.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00016-of-00019.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00016-of-00019.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00016-of-00019.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00016-of-00019.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00016-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.0.w1.weight": "model-00016-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.0.w2.weight": "model-00016-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.0.w3.weight": "model-00016-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.1.w1.weight": "model-00016-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.1.w2.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.1.w3.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.2.w1.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.2.w2.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.2.w3.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.3.w1.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.3.w2.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.3.w3.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.4.w1.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.4.w2.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.4.w3.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.5.w1.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.5.w2.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.5.w3.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.6.w1.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.6.w2.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.6.w3.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.7.w1.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.7.w2.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.experts.7.w3.weight": "model-00017-of-00019.safetensors", "model.layers.5.block_sparse_moe.gate.weight": "model-00017-of-00019.safetensors", "model.layers.5.input_layernorm.weight": "model-00017-of-00019.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00017-of-00019.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00017-of-00019.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00017-of-00019.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00017-of-00019.safetensors", 
"model.layers.5.self_attn.v_proj.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.0.w1.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.0.w2.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.0.w3.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.1.w1.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.1.w2.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.1.w3.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.2.w1.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.2.w2.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.2.w3.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.3.w1.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.3.w2.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.3.w3.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.4.w1.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.4.w2.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.4.w3.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.5.w1.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.5.w2.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.5.w3.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.6.w1.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.6.w2.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.6.w3.weight": "model-00017-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.7.w1.weight": "model-00018-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.7.w2.weight": "model-00018-of-00019.safetensors", "model.layers.6.block_sparse_moe.experts.7.w3.weight": "model-00018-of-00019.safetensors", "model.layers.6.block_sparse_moe.gate.weight": "model-00018-of-00019.safetensors", "model.layers.6.input_layernorm.weight": "model-00018-of-00019.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00018-of-00019.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00018-of-00019.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00018-of-00019.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00018-of-00019.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.0.w1.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.0.w2.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.0.w3.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.1.w1.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.1.w2.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.1.w3.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.2.w1.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.2.w2.weight": "model-00018-of-00019.safetensors", 
"model.layers.7.block_sparse_moe.experts.2.w3.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.3.w1.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.3.w2.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.3.w3.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.4.w1.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.4.w2.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.4.w3.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.5.w1.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.5.w2.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.5.w3.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.6.w1.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.6.w2.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.6.w3.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.7.w1.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.7.w2.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.experts.7.w3.weight": "model-00018-of-00019.safetensors", "model.layers.7.block_sparse_moe.gate.weight": "model-00018-of-00019.safetensors", "model.layers.7.input_layernorm.weight": "model-00018-of-00019.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00018-of-00019.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00018-of-00019.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00018-of-00019.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00018-of-00019.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.0.w1.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.0.w2.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.0.w3.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.1.w1.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.1.w2.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.1.w3.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.2.w1.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.2.w2.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.2.w3.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.3.w1.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.3.w2.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.3.w3.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.4.w1.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.4.w2.weight": "model-00018-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.4.w3.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.5.w1.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.5.w2.weight": 
"model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.5.w3.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.6.w1.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.6.w2.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.6.w3.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.7.w1.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.7.w2.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.experts.7.w3.weight": "model-00019-of-00019.safetensors", "model.layers.8.block_sparse_moe.gate.weight": "model-00019-of-00019.safetensors", "model.layers.8.input_layernorm.weight": "model-00019-of-00019.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00019-of-00019.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00019-of-00019.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00019-of-00019.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00019-of-00019.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.0.w1.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.0.w2.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.0.w3.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.1.w1.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.1.w2.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.1.w3.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.2.w1.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.2.w2.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.2.w3.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.3.w1.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.3.w2.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.3.w3.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.4.w1.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.4.w2.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.4.w3.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.5.w1.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.5.w2.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.5.w3.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.6.w1.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.6.w2.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.6.w3.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.7.w1.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.7.w2.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.experts.7.w3.weight": "model-00019-of-00019.safetensors", "model.layers.9.block_sparse_moe.gate.weight": "model-00019-of-00019.safetensors", "model.layers.9.input_layernorm.weight": 
"model-00019-of-00019.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00019-of-00019.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00019-of-00019.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00019-of-00019.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00019-of-00019.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00019-of-00019.safetensors", "model.norm.weight": "model-00019-of-00019.safetensors"}}
special_tokens_map.json ADDED
@@ -0,0 +1,5 @@
+ {
+   "bos_token": "<s>",
+   "eos_token": "</s>",
+   "unk_token": "<unk>"
+ }
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+ size 493443
tokenizer_config.json ADDED
@@ -0,0 +1,42 @@
+ {
+   "add_bos_token": true,
+   "add_eos_token": false,
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<unk>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "</s>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "additional_special_tokens": [],
+   "bos_token": "<s>",
+   "clean_up_tokenization_spaces": false,
+   "eos_token": "</s>",
+   "legacy": true,
+   "model_max_length": 1000000000000000019884624838656,
+   "pad_token": null,
+   "sp_model_kwargs": {},
+   "spaces_between_special_tokens": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "unk_token": "<unk>",
+   "use_default_system_prompt": false
+ }
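
Taken together, `tokenizer.model` (the SentencePiece model behind the LFS pointer), `tokenizer.json`, `special_tokens_map.json`, and the `tokenizer_config.json` above are what `AutoTokenizer` rebuilds the tokenizer from. A quick sanity-check sketch, assuming `transformers` and `sentencepiece` are installed and the repo has been downloaded to `./merged-model` (illustrative path):

```python
# Sketch only: load the tokenizer defined by the files above and confirm the
# configured behaviour (LlamaTokenizer, add_bos_token=true, no EOS appended).
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./merged-model")
ids = tok("Hello, Mixtral.")["input_ids"]
print(ids[:1])                          # expected to start with the <s> id (1)
print(tok.convert_ids_to_tokens(ids))   # the SentencePiece pieces behind those ids
```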