Delnith committed
Commit 5434cdb · verified · 1 Parent(s): 40de4cd

Upload 4-bit GPTQ model
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
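added_tokens.json pins the IDs of Qwen's control tokens (ChatML markers, tool-call tags, FIM and vision tokens). A minimal sketch that cross-checks a loaded tokenizer against this mapping; the repo id is a hypothetical placeholder:

```python
import json
from transformers import AutoTokenizer

REPO = "Delnith/model-4bit-gptq"  # hypothetical repo id, substitute the real one

tok = AutoTokenizer.from_pretrained(REPO)
added = json.load(open("added_tokens.json"))

# Every token listed above should round-trip to its pinned id.
for token, expected_id in added.items():
    assert tok.convert_tokens_to_ids(token) == expected_id, token
print(f"all {len(added)} added tokens map to their expected ids")
```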
chat_template.jinja ADDED
@@ -0,0 +1,54 @@
+ {%- if tools %}
+ {{- '<|im_start|>system\n' }}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- messages[0]['content'] }}
+ {%- else %}
+ {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}
+ {%- endif %}
+ {{- "\n\n# Tools\n\nYou may call one or more functions to assist with the user query.\n\nYou are provided with function signatures within <tools></tools> XML tags:\n<tools>" }}
+ {%- for tool in tools %}
+ {{- "\n" }}
+ {{- tool | tojson }}
+ {%- endfor %}
+ {{- "\n</tools>\n\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\n<tool_call>\n{\"name\": <function-name>, \"arguments\": <args-json-object>}\n</tool_call><|im_end|>\n" }}
+ {%- else %}
+ {%- if messages[0]['role'] == 'system' %}
+ {{- '<|im_start|>system\n' + messages[0]['content'] + '<|im_end|>\n' }}
+ {%- else %}
+ {{- '<|im_start|>system\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- for message in messages %}
+ {%- if (message.role == "user") or (message.role == "system" and not loop.first) or (message.role == "assistant" and not message.tool_calls) %}
+ {{- '<|im_start|>' + message.role + '\n' + message.content + '<|im_end|>' + '\n' }}
+ {%- elif message.role == "assistant" %}
+ {{- '<|im_start|>' + message.role }}
+ {%- if message.content %}
+ {{- '\n' + message.content }}
+ {%- endif %}
+ {%- for tool_call in message.tool_calls %}
+ {%- if tool_call.function is defined %}
+ {%- set tool_call = tool_call.function %}
+ {%- endif %}
+ {{- '\n<tool_call>\n{"name": "' }}
+ {{- tool_call.name }}
+ {{- '", "arguments": ' }}
+ {{- tool_call.arguments | tojson }}
+ {{- '}\n</tool_call>' }}
+ {%- endfor %}
+ {{- '<|im_end|>\n' }}
+ {%- elif message.role == "tool" %}
+ {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != "tool") %}
+ {{- '<|im_start|>user' }}
+ {%- endif %}
+ {{- '\n<tool_response>\n' }}
+ {{- message.content }}
+ {{- '\n</tool_response>' }}
+ {%- if loop.last or (messages[loop.index0 + 1].role != "tool") %}
+ {{- '<|im_end|>\n' }}
+ {%- endif %}
+ {%- endif %}
+ {%- endfor %}
+ {%- if add_generation_prompt %}
+ {{- '<|im_start|>assistant\n' }}
+ {%- endif %}
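chat_template.jinja is the ChatML-with-tools template: tool signatures are serialized into a <tools> block in the system turn, assistant tool calls are wrapped in <tool_call> tags, and tool results are folded back into user turns as <tool_response>. A sketch of rendering it through the tokenizer; the repo id and the weather tool are illustrative assumptions:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Delnith/model-4bit-gptq")  # hypothetical repo id

tools = [{
    "type": "function",
    "function": {
        "name": "get_weather",  # illustrative tool, not part of this repo
        "description": "Look up the current weather for a city",
        "parameters": {
            "type": "object",
            "properties": {"city": {"type": "string"}},
            "required": ["city"],
        },
    },
}]
messages = [{"role": "user", "content": "What's the weather in Paris?"}]

# Renders the template above: default Qwen system prompt, <tools> block,
# the conversation, then '<|im_start|>assistant\n' as the generation prompt.
prompt = tok.apply_chat_template(
    messages, tools=tools, tokenize=False, add_generation_prompt=True
)
print(prompt)
```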
config.json ADDED
@@ -0,0 +1,102 @@
+ {
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "initializer_range": 0.02,
+ "intermediate_size": 13824,
+ "layer_types": [
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention",
+ "full_attention"
+ ],
+ "max_position_embeddings": 32768,
+ "max_window_layers": 70,
+ "model_type": "qwen2",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 48,
+ "num_key_value_heads": 8,
+ "quantization_config": {
+ "bits": 4,
+ "checkpoint_format": "gptq",
+ "desc_act": true,
+ "group_size": 128,
+ "hyb_act": false,
+ "lm_head": false,
+ "meta": {
+ "damp_auto_increment": 0.01,
+ "damp_percent": 0.05,
+ "mse": 0.0,
+ "quantizer": [
+ "gptqmodel:4.0.0"
+ ],
+ "static_groups": false,
+ "true_sequential": true,
+ "uri": "https://github.com/modelcloud/gptqmodel",
+ "v2": false,
+ "v2_alpha": 0.25
+ },
+ "pack_dtype": "int32",
+ "quant_method": "gptq",
+ "sym": true
+ },
+ "rms_norm_eps": 1e-06,
+ "rope_scaling": null,
+ "rope_theta": 1000000.0,
+ "sliding_window": null,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.55.4",
+ "use_cache": true,
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ }
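Because config.json embeds the quantization_config, recent transformers can load this checkpoint directly, dispatching to a GPTQ kernel backend (e.g. gptqmodel or optimum) that must be installed separately. A loading sketch with a hypothetical repo id:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

REPO = "Delnith/model-4bit-gptq"  # hypothetical repo id

# The embedded quantization_config (bits=4, group_size=128, desc_act=true)
# tells transformers how the int4 weights are packed.
model = AutoModelForCausalLM.from_pretrained(REPO, device_map="auto", torch_dtype="auto")
tok = AutoTokenizer.from_pretrained(REPO)

inputs = tok("Hello, my name is", return_tensors="pt").to(model.device)
out = model.generate(**inputs, max_new_tokens=32)
print(tok.decode(out[0], skip_special_tokens=True))
```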
generation_config.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "bos_token_id": 151643,
+ "do_sample": true,
+ "eos_token_id": 151645,
+ "max_new_tokens": 2048,
+ "pad_token_id": 151643,
+ "repetition_penalty": 1.05,
+ "temperature": 0.7,
+ "top_k": 20,
+ "top_p": 0.8,
+ "transformers_version": "4.55.4"
+ }
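generation_config.json supplies the default sampling parameters that generate() uses when none are passed explicitly. A sketch showing how they are read and overridden per call, again with a hypothetical repo id:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("Delnith/model-4bit-gptq")  # hypothetical repo id
print(gen_cfg.temperature, gen_cfg.top_p, gen_cfg.top_k)  # 0.7 0.8 20

# Any field can still be overridden at call time, e.g.:
# model.generate(**inputs, generation_config=gen_cfg, temperature=0.2)
```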
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e708fcc22191d69a9acb871680ea934760a2ec6caf773fc199e63c019569040
+ size 3991846296
model-00002-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c2c143c36456dfdb9fa3380c142d88ee5da2284bd8bec51c1696e9c4f5554345
+ size 3973264384
model-00003-of-00003.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8634bc4198b18484fa0aa8e17b231eacc27eba526598214d6f54e149d778e68c
+ size 2023648928
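The three pointers above record the sha256 and byte size of each weight shard (about 10 GB total), which makes local integrity checks straightforward after a download. A sketch using only the values from this commit:

```python
import hashlib
from pathlib import Path

# Expected (oid, size) pairs copied from the LFS pointers above.
EXPECTED = {
    "model-00001-of-00003.safetensors":
        ("2e708fcc22191d69a9acb871680ea934760a2ec6caf773fc199e63c019569040", 3991846296),
    "model-00002-of-00003.safetensors":
        ("c2c143c36456dfdb9fa3380c142d88ee5da2284bd8bec51c1696e9c4f5554345", 3973264384),
    "model-00003-of-00003.safetensors":
        ("8634bc4198b18484fa0aa8e17b231eacc27eba526598214d6f54e149d778e68c", 2023648928),
}

for name, (oid, size) in EXPECTED.items():
    path = Path(name)
    digest = hashlib.sha256()
    with path.open("rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    assert path.stat().st_size == size and digest.hexdigest() == oid, name
print("all shards match their LFS pointers")
```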
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
quant_log.csv ADDED
@@ -0,0 +1,337 @@
+ layer,module,loss,samples,damp,time
+ 0,self_attn.k_proj,0.0000003836,0.05000,1.015
+ 0,self_attn.v_proj,0.0000000782,0.05000,0.726
+ 0,self_attn.q_proj,0.0000010448,0.05000,0.754
+ 0,self_attn.o_proj,0.0000005473,0.05000,0.760
+ 0,mlp.gate_proj,0.0000007420,0.05000,0.766
+ 0,mlp.up_proj,0.0000006467,0.05000,0.736
+ 0,mlp.down_proj,0.0000005687,0.05000,2.183
+ 1,self_attn.k_proj,0.0000000137,0.05000,0.758
+ 1,self_attn.v_proj,0.0000000057,0.05000,0.770
+ 1,self_attn.q_proj,0.0000000513,0.05000,0.827
+ 1,self_attn.o_proj,0.0000000077,0.05000,0.815
+ 1,mlp.gate_proj,0.0000066293,0.05000,0.778
+ 1,mlp.up_proj,0.0000029132,0.05000,0.833
+ 1,mlp.down_proj,0.0000000930,0.05000,2.140
+ 2,self_attn.k_proj,0.0000000650,0.05000,0.752
+ 2,self_attn.v_proj,0.0000000226,0.05000,0.731
+ 2,self_attn.q_proj,0.0000001794,0.05000,0.745
+ 2,self_attn.o_proj,0.0000000498,0.05000,0.739
+ 2,mlp.gate_proj,0.0000135490,0.05000,0.736
+ 2,mlp.up_proj,0.0000079025,0.05000,0.731
+ 2,mlp.down_proj,0.0000003571,0.05000,2.234
+ 3,self_attn.k_proj,0.0000003933,0.05000,0.732
+ 3,self_attn.v_proj,0.0000001049,0.05000,0.713
+ 3,self_attn.q_proj,0.0000010451,0.05000,0.721
+ 3,self_attn.o_proj,0.0000001377,0.05000,0.727
+ 3,mlp.gate_proj,0.0000162165,0.05000,0.741
+ 3,mlp.up_proj,0.0000082294,0.05000,0.730
+ 3,mlp.down_proj,0.0000006188,0.05000,2.142
+ 4,self_attn.k_proj,0.0000004112,0.05000,0.735
+ 4,self_attn.v_proj,0.0000001402,0.05000,0.713
+ 4,self_attn.q_proj,0.0000011218,0.05000,0.718
+ 4,self_attn.o_proj,0.0000002406,0.05000,0.750
+ 4,mlp.gate_proj,0.0000267092,0.05000,0.803
+ 4,mlp.up_proj,0.0000151525,0.05000,0.815
+ 4,mlp.down_proj,0.0000964310,0.05000,2.161
+ 5,self_attn.k_proj,0.0000013297,0.05000,0.733
+ 5,self_attn.v_proj,0.0000005998,0.05000,0.724
+ 5,self_attn.q_proj,0.0000043050,0.05000,0.728
+ 5,self_attn.o_proj,0.0000002593,0.05000,0.729
+ 5,mlp.gate_proj,0.0000366176,0.05000,0.748
+ 5,mlp.up_proj,0.0000200382,0.05000,0.734
+ 5,mlp.down_proj,0.0000034205,0.05000,2.164
+ 6,self_attn.k_proj,0.0000013150,0.05000,0.738
+ 6,self_attn.v_proj,0.0000007459,0.05000,0.728
+ 6,self_attn.q_proj,0.0000045390,0.05000,0.726
+ 6,self_attn.o_proj,0.0000002721,0.05000,0.727
+ 6,mlp.gate_proj,0.0000514023,0.05000,0.741
+ 6,mlp.up_proj,0.0000293805,0.05000,0.732
+ 6,mlp.down_proj,0.0000023108,0.05000,2.140
+ 7,self_attn.k_proj,0.0000017118,0.05000,0.738
+ 7,self_attn.v_proj,0.0000010427,0.05000,0.719
+ 7,self_attn.q_proj,0.0000061210,0.05000,0.726
+ 7,self_attn.o_proj,0.0000002765,0.05000,0.734
+ 7,mlp.gate_proj,0.0000617160,0.05000,0.748
+ 7,mlp.up_proj,0.0000353903,0.05000,0.735
+ 7,mlp.down_proj,0.0000016504,0.05000,2.157
+ 8,self_attn.k_proj,0.0000018246,0.05000,0.732
+ 8,self_attn.v_proj,0.0000009054,0.05000,0.718
+ 8,self_attn.q_proj,0.0000061746,0.05000,0.727
+ 8,self_attn.o_proj,0.0000003254,0.05000,0.730
+ 8,mlp.gate_proj,0.0000366315,0.05000,0.759
+ 8,mlp.up_proj,0.0000218339,0.05000,0.745
+ 8,mlp.down_proj,0.0000022364,0.05000,2.214
+ 9,self_attn.k_proj,0.0000015798,0.05000,0.779
+ 9,self_attn.v_proj,0.0000009709,0.05000,0.780
+ 9,self_attn.q_proj,0.0000055324,0.05000,0.792
+ 9,self_attn.o_proj,0.0000005803,0.05000,0.735
+ 9,mlp.gate_proj,0.0000134546,0.05000,0.759
+ 9,mlp.up_proj,0.0000123509,0.05000,0.757
+ 9,mlp.down_proj,0.0000025098,0.05000,2.140
+ 10,self_attn.k_proj,0.0000021740,0.05000,0.754
+ 10,self_attn.v_proj,0.0000012504,0.05000,0.730
+ 10,self_attn.q_proj,0.0000078099,0.05000,0.732
+ 10,self_attn.o_proj,0.0000005121,0.05000,0.733
+ 10,mlp.gate_proj,0.0000144171,0.05000,0.746
+ 10,mlp.up_proj,0.0000132418,0.05000,0.744
+ 10,mlp.down_proj,0.0000028267,0.05000,2.207
+ 11,self_attn.k_proj,0.0000016604,0.05000,0.734
+ 11,self_attn.v_proj,0.0000008726,0.05000,0.722
+ 11,self_attn.q_proj,0.0000059258,0.05000,0.732
+ 11,self_attn.o_proj,0.0000009211,0.05000,0.731
+ 11,mlp.gate_proj,0.0000186690,0.05000,0.759
+ 11,mlp.up_proj,0.0000150442,0.05000,0.784
+ 11,mlp.down_proj,0.0000025569,0.05000,2.264
+ 12,self_attn.k_proj,0.0000019593,0.05000,0.751
+ 12,self_attn.v_proj,0.0000009936,0.05000,0.725
+ 12,self_attn.q_proj,0.0000070199,0.05000,0.727
+ 12,self_attn.o_proj,0.0000009910,0.05000,0.792
+ 12,mlp.gate_proj,0.0000164469,0.05000,0.761
+ 12,mlp.up_proj,0.0000155571,0.05000,0.738
+ 12,mlp.down_proj,0.0000030347,0.05000,2.153
+ 13,self_attn.k_proj,0.0000020996,0.05000,0.737
+ 13,self_attn.v_proj,0.0000012002,0.05000,0.733
+ 13,self_attn.q_proj,0.0000074697,0.05000,0.725
+ 13,self_attn.o_proj,0.0000012893,0.05000,0.729
+ 13,mlp.gate_proj,0.0000188491,0.05000,0.746
+ 13,mlp.up_proj,0.0000178457,0.05000,0.738
+ 13,mlp.down_proj,0.0000037568,0.05000,2.205
+ 14,self_attn.k_proj,0.0000025165,0.05000,0.747
+ 14,self_attn.v_proj,0.0000012607,0.05000,0.723
+ 14,self_attn.q_proj,0.0000088147,0.05000,0.734
+ 14,self_attn.o_proj,0.0000012709,0.05000,0.739
+ 14,mlp.gate_proj,0.0000193264,0.05000,0.767
+ 14,mlp.up_proj,0.0000183729,0.05000,0.745
+ 14,mlp.down_proj,0.0000039256,0.05000,2.143
+ 15,self_attn.k_proj,0.0000021721,0.05000,0.753
+ 15,self_attn.v_proj,0.0000014329,0.05000,0.739
+ 15,self_attn.q_proj,0.0000079258,0.05000,0.728
+ 15,self_attn.o_proj,0.0000014436,0.05000,0.729
+ 15,mlp.gate_proj,0.0000211392,0.05000,0.770
+ 15,mlp.up_proj,0.0000192754,0.05000,0.818
+ 15,mlp.down_proj,0.0000039555,0.05000,2.196
+ 16,self_attn.k_proj,0.0000022858,0.05000,0.739
+ 16,self_attn.v_proj,0.0000010207,0.05000,0.734
+ 16,self_attn.q_proj,0.0000073921,0.05000,0.730
+ 16,self_attn.o_proj,0.0000014224,0.05000,0.757
+ 16,mlp.gate_proj,0.0000179755,0.05000,0.749
+ 16,mlp.up_proj,0.0000177250,0.05000,0.739
+ 16,mlp.down_proj,0.0000040355,0.05000,2.223
+ 17,self_attn.k_proj,0.0000025309,0.05000,0.805
+ 17,self_attn.v_proj,0.0000011885,0.05000,0.807
+ 17,self_attn.q_proj,0.0000087249,0.05000,0.795
+ 17,self_attn.o_proj,0.0000015970,0.05000,0.733
+ 17,mlp.gate_proj,0.0000173575,0.05000,0.785
+ 17,mlp.up_proj,0.0000176120,0.05000,0.752
+ 17,mlp.down_proj,0.0000037880,0.05000,2.169
+ 18,self_attn.k_proj,0.0000025784,0.05000,0.741
+ 18,self_attn.v_proj,0.0000013703,0.05000,0.721
+ 18,self_attn.q_proj,0.0000089820,0.05000,0.723
+ 18,self_attn.o_proj,0.0000020961,0.05000,0.751
+ 18,mlp.gate_proj,0.0000169104,0.05000,0.759
+ 18,mlp.up_proj,0.0000176954,0.05000,0.741
+ 18,mlp.down_proj,0.0000038492,0.05000,2.194
+ 19,self_attn.k_proj,0.0000028707,0.05000,0.753
+ 19,self_attn.v_proj,0.0000014709,0.05000,0.742
+ 19,self_attn.q_proj,0.0000108252,0.05000,0.732
+ 19,self_attn.o_proj,0.0000017332,0.05000,0.730
+ 19,mlp.gate_proj,0.0000177889,0.05000,0.790
+ 19,mlp.up_proj,0.0000186258,0.05000,0.760
+ 19,mlp.down_proj,0.0000041158,0.05000,2.156
+ 20,self_attn.k_proj,0.0000030751,0.05000,0.754
+ 20,self_attn.v_proj,0.0000013982,0.05000,0.721
+ 20,self_attn.q_proj,0.0000107140,0.05000,0.767
+ 20,self_attn.o_proj,0.0000024387,0.05000,0.806
+ 20,mlp.gate_proj,0.0000162837,0.05000,0.757
+ 20,mlp.up_proj,0.0000179382,0.05000,0.742
+ 20,mlp.down_proj,0.0000044658,0.05000,2.153
+ 21,self_attn.k_proj,0.0000029271,0.05000,0.758
+ 21,self_attn.v_proj,0.0000013779,0.05000,0.737
+ 21,self_attn.q_proj,0.0000100324,0.05000,0.735
+ 21,self_attn.o_proj,0.0000019044,0.05000,0.741
+ 21,mlp.gate_proj,0.0000166225,0.05000,0.753
+ 21,mlp.up_proj,0.0000184214,0.05000,0.754
+ 21,mlp.down_proj,0.0000041782,0.05000,2.331
+ 22,self_attn.k_proj,0.0000032301,0.05000,0.805
+ 22,self_attn.v_proj,0.0000020764,0.05000,0.792
+ 22,self_attn.q_proj,0.0000112834,0.05000,0.754
+ 22,self_attn.o_proj,0.0000028200,0.05000,0.753
+ 22,mlp.gate_proj,0.0000175082,0.05000,0.764
+ 22,mlp.up_proj,0.0000194985,0.05000,0.773
+ 22,mlp.down_proj,0.0000046281,0.05000,2.258
+ 23,self_attn.k_proj,0.0000030540,0.05000,0.742
+ 23,self_attn.v_proj,0.0000020901,0.05000,0.735
+ 23,self_attn.q_proj,0.0000114107,0.05000,0.728
+ 23,self_attn.o_proj,0.0000027048,0.05000,0.736
+ 23,mlp.gate_proj,0.0000191307,0.05000,0.806
+ 23,mlp.up_proj,0.0000202188,0.05000,0.796
+ 23,mlp.down_proj,0.0000047198,0.05000,2.213
+ 24,self_attn.k_proj,0.0000042195,0.05000,0.745
+ 24,self_attn.v_proj,0.0000018628,0.05000,0.723
+ 24,self_attn.q_proj,0.0000135178,0.05000,0.735
+ 24,self_attn.o_proj,0.0000032197,0.05000,0.766
+ 24,mlp.gate_proj,0.0000182919,0.05000,0.749
+ 24,mlp.up_proj,0.0000195997,0.05000,0.739
+ 24,mlp.down_proj,0.0000044227,0.05000,2.184
+ 25,self_attn.k_proj,0.0000043229,0.05000,0.743
+ 25,self_attn.v_proj,0.0000025822,0.05000,0.727
+ 25,self_attn.q_proj,0.0000154015,0.05000,0.743
+ 25,self_attn.o_proj,0.0000030529,0.05000,0.739
+ 25,mlp.gate_proj,0.0000188421,0.05000,0.764
+ 25,mlp.up_proj,0.0000209068,0.05000,0.753
+ 25,mlp.down_proj,0.0000051917,0.05000,2.159
+ 26,self_attn.k_proj,0.0000039396,0.05000,0.737
+ 26,self_attn.v_proj,0.0000017195,0.05000,0.725
+ 26,self_attn.q_proj,0.0000140131,0.05000,0.735
+ 26,self_attn.o_proj,0.0000025629,0.05000,0.744
+ 26,mlp.gate_proj,0.0000200527,0.05000,0.749
+ 26,mlp.up_proj,0.0000231107,0.05000,0.760
+ 26,mlp.down_proj,0.0000059867,0.05000,2.163
+ 27,self_attn.k_proj,0.0000042207,0.05000,0.740
+ 27,self_attn.v_proj,0.0000021844,0.05000,0.718
+ 27,self_attn.q_proj,0.0000140871,0.05000,0.722
+ 27,self_attn.o_proj,0.0000030243,0.05000,0.728
+ 27,mlp.gate_proj,0.0000213445,0.05000,0.746
+ 27,mlp.up_proj,0.0000247833,0.05000,0.765
+ 27,mlp.down_proj,0.0000070833,0.05000,2.175
+ 28,self_attn.k_proj,0.0000034175,0.05000,0.793
+ 28,self_attn.v_proj,0.0000030988,0.05000,0.782
+ 28,self_attn.q_proj,0.0000138590,0.05000,0.786
+ 28,self_attn.o_proj,0.0000040915,0.05000,0.733
+ 28,mlp.gate_proj,0.0000215709,0.05000,0.755
+ 28,mlp.up_proj,0.0000255722,0.05000,0.750
+ 28,mlp.down_proj,0.0000078348,0.05000,2.175
+ 29,self_attn.k_proj,0.0000045058,0.05000,0.736
+ 29,self_attn.v_proj,0.0000032128,0.05000,0.715
+ 29,self_attn.q_proj,0.0000166073,0.05000,0.717
+ 29,self_attn.o_proj,0.0000047115,0.05000,0.740
+ 29,mlp.gate_proj,0.0000226059,0.05000,0.753
+ 29,mlp.up_proj,0.0000267313,0.05000,0.750
+ 29,mlp.down_proj,0.0000088983,0.05000,2.230
+ 30,self_attn.k_proj,0.0000038593,0.05000,0.750
+ 30,self_attn.v_proj,0.0000034925,0.05000,0.730
+ 30,self_attn.q_proj,0.0000152342,0.05000,0.721
+ 30,self_attn.o_proj,0.0000048734,0.05000,0.739
+ 30,mlp.gate_proj,0.0000232500,0.05000,0.746
+ 30,mlp.up_proj,0.0000273514,0.05000,0.741
+ 30,mlp.down_proj,0.0000097457,0.05000,2.166
+ 31,self_attn.k_proj,0.0000038229,0.05000,0.741
+ 31,self_attn.v_proj,0.0000032801,0.05000,0.718
+ 31,self_attn.q_proj,0.0000150417,0.05000,0.725
+ 31,self_attn.o_proj,0.0000039066,0.05000,0.782
+ 31,mlp.gate_proj,0.0000263491,0.05000,0.763
+ 31,mlp.up_proj,0.0000302772,0.05000,0.742
+ 31,mlp.down_proj,0.0000114387,0.05000,2.172
+ 32,self_attn.k_proj,0.0000040586,0.05000,0.797
+ 32,self_attn.v_proj,0.0000040552,0.05000,0.761
+ 32,self_attn.q_proj,0.0000162478,0.05000,0.783
+ 32,self_attn.o_proj,0.0000036017,0.05000,0.793
+ 32,mlp.gate_proj,0.0000282540,0.05000,0.758
+ 32,mlp.up_proj,0.0000316693,0.05000,0.738
+ 32,mlp.down_proj,0.0000129634,0.05000,2.251
+ 33,self_attn.k_proj,0.0000041616,0.05000,0.776
+ 33,self_attn.v_proj,0.0000040587,0.05000,0.751
+ 33,self_attn.q_proj,0.0000171991,0.05000,0.744
+ 33,self_attn.o_proj,0.0000042980,0.05000,0.749
+ 33,mlp.gate_proj,0.0000341107,0.05000,0.749
+ 33,mlp.up_proj,0.0000376710,0.05000,0.742
+ 33,mlp.down_proj,0.0000190934,0.05000,2.153
+ 34,self_attn.k_proj,0.0000043125,0.05000,0.737
+ 34,self_attn.v_proj,0.0000048270,0.05000,0.731
+ 34,self_attn.q_proj,0.0000189533,0.05000,0.726
+ 34,self_attn.o_proj,0.0000045642,0.05000,0.731
+ 34,mlp.gate_proj,0.0000401084,0.05000,0.760
+ 34,mlp.up_proj,0.0000429217,0.05000,0.740
+ 34,mlp.down_proj,0.0000224412,0.05000,2.151
+ 35,self_attn.k_proj,0.0000041633,0.05000,0.734
+ 35,self_attn.v_proj,0.0000042589,0.05000,0.722
+ 35,self_attn.q_proj,0.0000169748,0.05000,0.723
+ 35,self_attn.o_proj,0.0000073256,0.05000,0.734
+ 35,mlp.gate_proj,0.0000451130,0.05000,0.776
+ 35,mlp.up_proj,0.0000470624,0.05000,0.740
+ 35,mlp.down_proj,0.0000268825,0.05000,2.152
+ 36,self_attn.k_proj,0.0000046333,0.05000,0.746
+ 36,self_attn.v_proj,0.0000074812,0.05000,0.726
+ 36,self_attn.q_proj,0.0000211151,0.05000,0.741
+ 36,self_attn.o_proj,0.0000063130,0.05000,0.786
+ 36,mlp.gate_proj,0.0000486521,0.05000,0.754
+ 36,mlp.up_proj,0.0000513203,0.05000,0.742
+ 36,mlp.down_proj,0.0000342591,0.05000,2.181
+ 37,self_attn.k_proj,0.0000050801,0.05000,0.752
+ 37,self_attn.v_proj,0.0000068690,0.05000,0.727
+ 37,self_attn.q_proj,0.0000210885,0.05000,0.727
+ 37,self_attn.o_proj,0.0000079530,0.05000,0.748
+ 37,mlp.gate_proj,0.0000523901,0.05000,0.762
+ 37,mlp.up_proj,0.0000550565,0.05000,0.746
+ 37,mlp.down_proj,0.0000368927,0.05000,2.235
+ 38,self_attn.k_proj,0.0000047046,0.05000,0.850
+ 38,self_attn.v_proj,0.0000067587,0.05000,0.736
+ 38,self_attn.q_proj,0.0000202120,0.05000,0.724
+ 38,self_attn.o_proj,0.0000061420,0.05000,0.729
+ 38,mlp.gate_proj,0.0000566031,0.05000,0.771
+ 38,mlp.up_proj,0.0000601863,0.05000,0.756
+ 38,mlp.down_proj,0.0000401681,0.05000,2.196
+ 39,self_attn.k_proj,0.0000043822,0.05000,0.738
+ 39,self_attn.v_proj,0.0000071958,0.05000,0.739
+ 39,self_attn.q_proj,0.0000199039,0.05000,0.743
+ 39,self_attn.o_proj,0.0000075891,0.05000,0.742
+ 39,mlp.gate_proj,0.0000607342,0.05000,0.815
+ 39,mlp.up_proj,0.0000651528,0.05000,0.820
+ 39,mlp.down_proj,0.0000479669,0.05000,2.146
+ 40,self_attn.k_proj,0.0000049208,0.05000,0.745
+ 40,self_attn.v_proj,0.0000095758,0.05000,0.754
+ 40,self_attn.q_proj,0.0000219568,0.05000,0.759
+ 40,self_attn.o_proj,0.0000064147,0.05000,0.744
+ 40,mlp.gate_proj,0.0000638470,0.05000,0.760
+ 40,mlp.up_proj,0.0000689741,0.05000,0.744
+ 40,mlp.down_proj,0.0000523432,0.05000,2.166
+ 41,self_attn.k_proj,0.0000045001,0.05000,0.827
+ 41,self_attn.v_proj,0.0000094180,0.05000,0.758
+ 41,self_attn.q_proj,0.0000212548,0.05000,0.730
+ 41,self_attn.o_proj,0.0000054054,0.05000,0.738
+ 41,mlp.gate_proj,0.0000669373,0.05000,0.772
+ 41,mlp.up_proj,0.0000732278,0.05000,0.770
+ 41,mlp.down_proj,0.0000576096,0.05000,2.163
+ 42,self_attn.k_proj,0.0000048940,0.05000,0.789
+ 42,self_attn.v_proj,0.0000106496,0.05000,0.776
+ 42,self_attn.q_proj,0.0000218831,0.05000,0.742
+ 42,self_attn.o_proj,0.0000062110,0.05000,0.749
+ 42,mlp.gate_proj,0.0000708179,0.05000,0.813
+ 42,mlp.up_proj,0.0000786422,0.05000,0.821
+ 42,mlp.down_proj,0.0000692101,0.05000,2.212
+ 43,self_attn.k_proj,0.0000049360,0.05000,0.769
+ 43,self_attn.v_proj,0.0000151946,0.05000,0.767
+ 43,self_attn.q_proj,0.0000240280,0.05000,0.763
+ 43,self_attn.o_proj,0.0000109670,0.05000,0.768
+ 43,mlp.gate_proj,0.0000772213,0.05000,0.843
+ 43,mlp.up_proj,0.0000874869,0.05000,1.152
+ 43,mlp.down_proj,0.0000823243,0.05000,2.178
+ 44,self_attn.k_proj,0.0000045238,0.05000,0.822
+ 44,self_attn.v_proj,0.0000168177,0.05000,0.804
+ 44,self_attn.q_proj,0.0000236080,0.05000,0.794
+ 44,self_attn.o_proj,0.0000180416,0.05000,0.747
+ 44,mlp.gate_proj,0.0000815393,0.05000,0.794
+ 44,mlp.up_proj,0.0000938600,0.05000,0.751
+ 44,mlp.down_proj,0.0001585995,0.05000,2.156
+ 45,self_attn.k_proj,0.0000048384,0.05000,0.740
+ 45,self_attn.v_proj,0.0000209304,0.05000,0.740
+ 45,self_attn.q_proj,0.0000253944,0.05000,0.727
+ 45,self_attn.o_proj,0.0000221834,0.05000,0.743
+ 45,mlp.gate_proj,0.0000914715,0.05000,0.753
+ 45,mlp.up_proj,0.0001040489,0.05000,0.749
+ 45,mlp.down_proj,0.0001830558,0.05000,2.216
+ 46,self_attn.k_proj,0.0000046951,0.05000,0.749
+ 46,self_attn.v_proj,0.0000239839,0.05000,0.734
+ 46,self_attn.q_proj,0.0000252321,0.05000,0.737
+ 46,self_attn.o_proj,0.0000349532,0.05000,0.732
+ 46,mlp.gate_proj,0.0001022066,0.05000,0.748
+ 46,mlp.up_proj,0.0001110833,0.05000,0.743
+ 46,mlp.down_proj,0.0003265005,0.05000,2.173
+ 47,self_attn.k_proj,0.0000042139,0.05000,0.738
+ 47,self_attn.v_proj,0.0000151991,0.05000,0.728
+ 47,self_attn.q_proj,0.0000205626,0.05000,0.719
+ 47,self_attn.o_proj,0.0000162146,0.05000,0.760
+ 47,mlp.gate_proj,0.0001253340,0.05000,0.781
+ 47,mlp.up_proj,0.0001317324,0.05000,0.740
+ 47,mlp.down_proj,0.0007685446,0.05000,2.138
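quant_log.csv records the per-module quantization loss and wall time. Note the data rows carry five fields against the six-name header (the samples column appears to be absent), so a positional read is safest. A sketch summarizing the worst loss per layer:

```python
import csv
from collections import defaultdict

# Read positionally: layer, module, loss, damp, time (samples absent in rows).
worst = defaultdict(float)
with open("quant_log.csv") as f:
    next(f)  # skip the header line
    for layer, module, loss, damp, secs in csv.reader(f):
        worst[int(layer)] = max(worst[int(layer)], float(loss))

# Loss grows toward the last layers; layer 47 mlp.down_proj peaks near 7.7e-4.
for layer, loss in sorted(worst.items(), key=lambda kv: -kv[1])[:5]:
    print(f"layer {layer:2d}: max loss {loss:.7f}")
```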
quantize_config.json ADDED
@@ -0,0 +1,24 @@
+ {
+ "bits": 4,
+ "group_size": 128,
+ "desc_act": true,
+ "hyb_act": false,
+ "sym": true,
+ "lm_head": false,
+ "quant_method": "gptq",
+ "checkpoint_format": "gptq",
+ "pack_dtype": "int32",
+ "meta": {
+ "quantizer": [
+ "gptqmodel:4.0.0"
+ ],
+ "uri": "https://github.com/modelcloud/gptqmodel",
+ "damp_percent": 0.05,
+ "damp_auto_increment": 0.01,
+ "static_groups": false,
+ "true_sequential": true,
+ "mse": 0.0,
+ "v2": false,
+ "v2_alpha": 0.25
+ }
+ }
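quantize_config.json is the recipe gptqmodel 4.0.0 wrote out. A sketch of the kind of call that produces such a config; the base model id and the calibration texts are assumptions, not recorded anywhere in this commit:

```python
from gptqmodel import GPTQModel, QuantizeConfig

# Mirror the fields above; argument names follow gptqmodel's QuantizeConfig.
qcfg = QuantizeConfig(
    bits=4,
    group_size=128,
    desc_act=True,
    sym=True,
    damp_percent=0.05,
)

# Assumed base checkpoint; the config geometry (48 layers, hidden 5120,
# vocab 152064) matches a Qwen2-architecture 14B model.
model = GPTQModel.load("Qwen/Qwen2.5-14B-Instruct", qcfg)

# Placeholder calibration data; real runs need representative text samples.
calibration_dataset = ["The quick brown fox jumps over the lazy dog."] * 128
model.quantize(calibration_dataset)
model.save("model-4bit-gptq")
```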
special_tokens_map.json ADDED
@@ -0,0 +1,25 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<|fim_pad|>"
+ }
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c5ae00e602b8860cbd784ba82a8aa14e8feecec692e7076590d014d7b7fdafa
+ size 11421896
tokenizer_config.json ADDED
@@ -0,0 +1,208 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "extra_special_tokens": {},
+ "model_max_length": 131072,
+ "pad_token": "<|fim_pad|>",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2TokenizerFast",
+ "unk_token": null,
+ "_commit_hash": null
+ }
vocab.json ADDED
The diff for this file is too large to render. See raw diff