Antimage01 committed
Commit 405f887 · verified · 1 Parent(s): e4e1d8d

Add files using upload-large-folder tool

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,25 @@
+ {
+ "</tool_call>": 151658,
+ "<tool_call>": 151657,
+ "<|box_end|>": 151649,
+ "<|box_start|>": 151648,
+ "<|endoftext|>": 151643,
+ "<|file_sep|>": 151664,
+ "<|fim_middle|>": 151660,
+ "<|fim_pad|>": 151662,
+ "<|fim_prefix|>": 151659,
+ "<|fim_suffix|>": 151661,
+ "<|im_end|>": 151645,
+ "<|im_start|>": 151644,
+ "<|image_pad|>": 151655,
+ "<|image|>": 151665,
+ "<|object_ref_end|>": 151647,
+ "<|object_ref_start|>": 151646,
+ "<|quad_end|>": 151651,
+ "<|quad_start|>": 151650,
+ "<|repo_name|>": 151663,
+ "<|video_pad|>": 151656,
+ "<|vision_end|>": 151653,
+ "<|vision_pad|>": 151654,
+ "<|vision_start|>": 151652
+ }
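
The mapping above registers the Qwen2-family special tokens (chat markers, FIM and tool-call tags, vision placeholders) under fixed ids; `<|image|>` is 151665, matching `image_token_index` in config.json below. A minimal spot-check sketch, assuming a hypothetical local checkout of this repository that transformers' `AutoTokenizer` can load:

```python
import json
from transformers import AutoTokenizer

repo = "./URSA-8B-PS-GRPO"  # hypothetical local path to this repository
tok = AutoTokenizer.from_pretrained(repo)

# Every entry in added_tokens.json should resolve to the same id via the tokenizer.
with open(f"{repo}/added_tokens.json") as f:
    added = json.load(f)
for token, expected_id in added.items():
    assert tok.convert_tokens_to_ids(token) == expected_id, token

print(tok.convert_tokens_to_ids("<|image|>"))  # 151665
```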
chat_template.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'Please reason step by step, and put your final answer within \\\\boxed{}.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nPlease reason step by step, and put your final answer within \\\\boxed{}.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n"
+ }
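
The template follows the Qwen2 chat format: an optional `# Tools` block injected into the system turn, `<|im_start|>`/`<|im_end|>` framing for every message, `<tool_call>`/`<tool_response>` wrappers for tool use, and a default system prompt asking the model to reason step by step and put the final answer in `\boxed{}`. A minimal rendering sketch, assuming the same hypothetical local checkout; the template is passed explicitly in case this transformers version does not read chat_template.json on its own:

```python
import json
from transformers import AutoTokenizer

repo = "./URSA-8B-PS-GRPO"  # hypothetical local path to this repository
tok = AutoTokenizer.from_pretrained(repo)

with open(f"{repo}/chat_template.json") as f:
    template = json.load(f)["chat_template"]

messages = [{"role": "user", "content": "<|image|>\nWhat is the area of the shaded region?"}]

# add_generation_prompt=True appends the trailing '<|im_start|>assistant\n'.
prompt = tok.apply_chat_template(
    messages, chat_template=template, tokenize=False, add_generation_prompt=True
)
print(prompt)
```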
config.json ADDED
@@ -0,0 +1,148 @@
+ {
+ "_name_or_path": "URSA-8B-PS-GRPO",
+ "aligner_config": {
+ "_name_or_path": "",
+ "add_cross_attention": false,
+ "architectures": null,
+ "bad_words_ids": null,
+ "begin_suppress_tokens": null,
+ "bos_token_id": null,
+ "chunk_size_feed_forward": 0,
+ "cls": "MlpProjector",
+ "cross_attention_hidden_size": null,
+ "decoder_start_token_id": null,
+ "diversity_penalty": 0.0,
+ "do_sample": false,
+ "early_stopping": false,
+ "encoder_no_repeat_ngram_size": 0,
+ "eos_token_id": null,
+ "exponential_decay_length_penalty": null,
+ "finetuning_task": null,
+ "forced_bos_token_id": null,
+ "forced_eos_token_id": null,
+ "id2label": {
+ "0": "LABEL_0",
+ "1": "LABEL_1"
+ },
+ "is_decoder": false,
+ "is_encoder_decoder": false,
+ "label2id": {
+ "LABEL_0": 0,
+ "LABEL_1": 1
+ },
+ "length_penalty": 1.0,
+ "max_length": 20,
+ "min_length": 0,
+ "model_type": "aligner",
+ "no_repeat_ngram_size": 0,
+ "num_beam_groups": 1,
+ "num_beams": 1,
+ "num_return_sequences": 1,
+ "output_attentions": false,
+ "output_hidden_states": false,
+ "output_scores": false,
+ "pad_token_id": null,
+ "params": {
+ "depth": 2,
+ "input_dim": 1024,
+ "n_embed": 3584,
+ "projector_type": "low_high_hybrid_split_mlp_gelu"
+ },
+ "prefix": null,
+ "problem_type": null,
+ "pruned_heads": {},
+ "remove_invalid_values": false,
+ "repetition_penalty": 1.0,
+ "return_dict": true,
+ "return_dict_in_generate": false,
+ "sep_token_id": null,
+ "suppress_tokens": null,
+ "task_specific_params": null,
+ "temperature": 1.0,
+ "tf_legacy_loss": false,
+ "tie_encoder_decoder": false,
+ "tie_word_embeddings": true,
+ "tokenizer_class": null,
+ "top_k": 50,
+ "top_p": 1.0,
+ "torch_dtype": null,
+ "torchscript": false,
+ "typical_p": 1.0,
+ "use_bfloat16": false
+ },
+ "architectures": [
+ "UrsaForConditionalGeneration"
+ ],
+ "ignore_index": -100,
+ "image_token_index": 151665,
+ "model_type": "ursa",
+ "projector_hidden_act": "gelu",
+ "text_config": {
+ "architectures": [
+ "Qwen2ForCausalLM"
+ ],
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "hidden_size": 3584,
+ "intermediate_size": 18944,
+ "max_position_embeddings": 4096,
+ "max_window_layers": 28,
+ "model_type": "qwen2",
+ "num_attention_heads": 28,
+ "num_hidden_layers": 28,
+ "num_key_value_heads": 4,
+ "sliding_window": null,
+ "torch_dtype": "bfloat16",
+ "use_sliding_window": false,
+ "vocab_size": 152064
+ },
+ "torch_dtype": "float32",
+ "transformers_version": "4.45.0",
+ "vision_config": {
+ "cls": "HybridVisionTower",
+ "model_type": "vision",
+ "params": {
+ "concat_type": "tuple",
+ "freeze_high": true,
+ "freeze_low": true,
+ "high_res_cfg": {
+ "ckpt_path": "",
+ "image_size": 1024,
+ "model_name": "sam_b_downsample",
+ "output_dim": 1024,
+ "pixel_mean": [
+ 0.48145466,
+ 0.4578275,
+ 0.40821073
+ ],
+ "pixel_std": [
+ 0.26862954,
+ 0.26130258,
+ 0.27577711
+ ],
+ "select_feature": "same",
+ "select_layer": -1
+ },
+ "low_res_cfg": {
+ "ckpt_path": "",
+ "image_size": 384,
+ "model_name": "siglip_large_patch16_384",
+ "output_dim": 1024,
+ "pixel_mean": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "pixel_std": [
+ 0.5,
+ 0.5,
+ 0.5
+ ],
+ "select_feature": "same",
+ "select_layer": -1
+ }
+ }
+ },
+ "vision_feature_layer": -2,
+ "vision_feature_select_strategy": "default"
+ }
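
The config describes a two-tower setup: a Qwen2 text backbone (28 layers, hidden size 3584, grouped-query attention with 4 KV heads) behind an `MlpProjector` aligner, plus a `HybridVisionTower` that pairs a SAM-based high-resolution encoder (1024 px) with a SigLIP low-resolution encoder (384 px). Since the `ursa` model type is not part of stock transformers, the sketch below only reads the file with the standard library; the path is hypothetical:

```python
import json

# Hypothetical local path to the config.json added in this commit.
with open("./URSA-8B-PS-GRPO/config.json") as f:
    cfg = json.load(f)

text = cfg["text_config"]
vision = cfg["vision_config"]["params"]

print(cfg["architectures"])                                       # ['UrsaForConditionalGeneration']
print(text["num_hidden_layers"], text["hidden_size"])             # 28 3584
print(text["num_attention_heads"], text["num_key_value_heads"])   # 28 4
print(vision["high_res_cfg"]["image_size"],
      vision["low_res_cfg"]["image_size"])                        # 1024 384
print(cfg["image_token_index"])                                   # 151665, the id of <|image|>
```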
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+ "_from_model_config": true,
+ "bos_token_id": 151643,
+ "eos_token_id": 151645,
+ "transformers_version": "4.45.0"
+ }
merges.txt ADDED
The diff for this file is too large to render. See raw diff
 
model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be0236e1205629ed21db2afb354b764ded8968a6ae465283caf8bb87f25dba81
+ size 4946419388
model-00002-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4fb3a1c2494f7b8542b30784db29d8f733c7d817e11751a0e00f4aeaeba3cbee
+ size 4932744848
model-00003-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:19ec89acbdb981b8e18c8b4f5fb59e8fe250f3e4c822c3bb19e8cc4c4483d078
+ size 4932744856
model-00004-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95b8b6dc3a6ca97f1ac8f1bf3eff6e1608cbbddec78c1a1b6306a852368cdbaf
+ size 4998853328
model-00005-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:616f80ce18a595139bfb8594fcce8e7c73846ad1e9d9f443874f48cd3cf14850
+ size 4984125264
model-00006-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:88363566ce40c2e00f486d64b36ad5ed317ed12054e7d776cc112e31eb6dc74d
+ size 4932744904
model-00007-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a6d3bd768eedf155294d6002f4443dff1300f1b9c95fd01ff9d4a86307d73703
+ size 2451614336
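
Each shard above is stored as a Git LFS pointer (spec version, sha256 oid, byte size) rather than the weights themselves. A minimal sketch for checking a downloaded shard against the size and digest advertised in its pointer; the local filename is an assumption:

```python
import hashlib
import os

def verify_shard(path: str, expected_sha256: str, expected_size: int) -> bool:
    """Compare a downloaded file against the oid/size from its LFS pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_sha256

# Values copied from the model-00001-of-00007.safetensors pointer above;
# the file is assumed to have been downloaded into the working directory.
print(verify_shard(
    "model-00001-of-00007.safetensors",
    "be0236e1205629ed21db2afb354b764ded8968a6ae465283caf8bb87f25dba81",
    4946419388,
))
```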
model.safetensors.index.json ADDED
@@ -0,0 +1,848 @@
1
+ {
2
+ "metadata": {
3
+ "total_size": 32179133444
4
+ },
5
+ "weight_map": {
6
+ "aligner.high_up_proj.bias": "model-00001-of-00007.safetensors",
7
+ "aligner.high_up_proj.weight": "model-00001-of-00007.safetensors",
8
+ "aligner.layers.1.bias": "model-00001-of-00007.safetensors",
9
+ "aligner.layers.1.weight": "model-00001-of-00007.safetensors",
10
+ "aligner.low_up_proj.bias": "model-00001-of-00007.safetensors",
11
+ "aligner.low_up_proj.weight": "model-00001-of-00007.safetensors",
12
+ "language_model.lm_head.weight": "model-00007-of-00007.safetensors",
13
+ "language_model.model.embed_tokens.weight": "model-00001-of-00007.safetensors",
14
+ "language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00007.safetensors",
15
+ "language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00007.safetensors",
16
+ "language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00007.safetensors",
17
+ "language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00007.safetensors",
18
+ "language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00007.safetensors",
19
+ "language_model.model.layers.0.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
20
+ "language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
21
+ "language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
22
+ "language_model.model.layers.0.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
23
+ "language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
24
+ "language_model.model.layers.0.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
25
+ "language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
26
+ "language_model.model.layers.1.input_layernorm.weight": "model-00002-of-00007.safetensors",
27
+ "language_model.model.layers.1.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
28
+ "language_model.model.layers.1.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
29
+ "language_model.model.layers.1.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
30
+ "language_model.model.layers.1.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
31
+ "language_model.model.layers.1.self_attn.k_proj.bias": "model-00001-of-00007.safetensors",
32
+ "language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00007.safetensors",
33
+ "language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00007.safetensors",
34
+ "language_model.model.layers.1.self_attn.q_proj.bias": "model-00001-of-00007.safetensors",
35
+ "language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00007.safetensors",
36
+ "language_model.model.layers.1.self_attn.v_proj.bias": "model-00001-of-00007.safetensors",
37
+ "language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00007.safetensors",
38
+ "language_model.model.layers.10.input_layernorm.weight": "model-00003-of-00007.safetensors",
39
+ "language_model.model.layers.10.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
40
+ "language_model.model.layers.10.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
41
+ "language_model.model.layers.10.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
42
+ "language_model.model.layers.10.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
43
+ "language_model.model.layers.10.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
44
+ "language_model.model.layers.10.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
45
+ "language_model.model.layers.10.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
46
+ "language_model.model.layers.10.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
47
+ "language_model.model.layers.10.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
48
+ "language_model.model.layers.10.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
49
+ "language_model.model.layers.10.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
50
+ "language_model.model.layers.11.input_layernorm.weight": "model-00004-of-00007.safetensors",
51
+ "language_model.model.layers.11.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
52
+ "language_model.model.layers.11.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
53
+ "language_model.model.layers.11.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
54
+ "language_model.model.layers.11.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
55
+ "language_model.model.layers.11.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
56
+ "language_model.model.layers.11.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
57
+ "language_model.model.layers.11.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
58
+ "language_model.model.layers.11.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
59
+ "language_model.model.layers.11.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
60
+ "language_model.model.layers.11.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
61
+ "language_model.model.layers.11.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
62
+ "language_model.model.layers.12.input_layernorm.weight": "model-00004-of-00007.safetensors",
63
+ "language_model.model.layers.12.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
64
+ "language_model.model.layers.12.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
65
+ "language_model.model.layers.12.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
66
+ "language_model.model.layers.12.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
67
+ "language_model.model.layers.12.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
68
+ "language_model.model.layers.12.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
69
+ "language_model.model.layers.12.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
70
+ "language_model.model.layers.12.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
71
+ "language_model.model.layers.12.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
72
+ "language_model.model.layers.12.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
73
+ "language_model.model.layers.12.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
74
+ "language_model.model.layers.13.input_layernorm.weight": "model-00004-of-00007.safetensors",
75
+ "language_model.model.layers.13.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
76
+ "language_model.model.layers.13.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
77
+ "language_model.model.layers.13.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
78
+ "language_model.model.layers.13.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
79
+ "language_model.model.layers.13.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
80
+ "language_model.model.layers.13.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
81
+ "language_model.model.layers.13.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
82
+ "language_model.model.layers.13.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
83
+ "language_model.model.layers.13.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
84
+ "language_model.model.layers.13.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
85
+ "language_model.model.layers.13.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
86
+ "language_model.model.layers.14.input_layernorm.weight": "model-00004-of-00007.safetensors",
87
+ "language_model.model.layers.14.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
88
+ "language_model.model.layers.14.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
89
+ "language_model.model.layers.14.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
90
+ "language_model.model.layers.14.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
91
+ "language_model.model.layers.14.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
92
+ "language_model.model.layers.14.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
93
+ "language_model.model.layers.14.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
94
+ "language_model.model.layers.14.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
95
+ "language_model.model.layers.14.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
96
+ "language_model.model.layers.14.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
97
+ "language_model.model.layers.14.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
98
+ "language_model.model.layers.15.input_layernorm.weight": "model-00004-of-00007.safetensors",
99
+ "language_model.model.layers.15.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
100
+ "language_model.model.layers.15.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
101
+ "language_model.model.layers.15.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
102
+ "language_model.model.layers.15.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
103
+ "language_model.model.layers.15.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
104
+ "language_model.model.layers.15.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
105
+ "language_model.model.layers.15.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
106
+ "language_model.model.layers.15.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
107
+ "language_model.model.layers.15.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
108
+ "language_model.model.layers.15.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
109
+ "language_model.model.layers.15.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
110
+ "language_model.model.layers.16.input_layernorm.weight": "model-00004-of-00007.safetensors",
111
+ "language_model.model.layers.16.mlp.down_proj.weight": "model-00004-of-00007.safetensors",
112
+ "language_model.model.layers.16.mlp.gate_proj.weight": "model-00004-of-00007.safetensors",
113
+ "language_model.model.layers.16.mlp.up_proj.weight": "model-00004-of-00007.safetensors",
114
+ "language_model.model.layers.16.post_attention_layernorm.weight": "model-00004-of-00007.safetensors",
115
+ "language_model.model.layers.16.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
116
+ "language_model.model.layers.16.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
117
+ "language_model.model.layers.16.self_attn.o_proj.weight": "model-00004-of-00007.safetensors",
118
+ "language_model.model.layers.16.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
119
+ "language_model.model.layers.16.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
120
+ "language_model.model.layers.16.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
121
+ "language_model.model.layers.16.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
122
+ "language_model.model.layers.17.input_layernorm.weight": "model-00005-of-00007.safetensors",
123
+ "language_model.model.layers.17.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
124
+ "language_model.model.layers.17.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
125
+ "language_model.model.layers.17.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
126
+ "language_model.model.layers.17.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
127
+ "language_model.model.layers.17.self_attn.k_proj.bias": "model-00004-of-00007.safetensors",
128
+ "language_model.model.layers.17.self_attn.k_proj.weight": "model-00004-of-00007.safetensors",
129
+ "language_model.model.layers.17.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
130
+ "language_model.model.layers.17.self_attn.q_proj.bias": "model-00004-of-00007.safetensors",
131
+ "language_model.model.layers.17.self_attn.q_proj.weight": "model-00004-of-00007.safetensors",
132
+ "language_model.model.layers.17.self_attn.v_proj.bias": "model-00004-of-00007.safetensors",
133
+ "language_model.model.layers.17.self_attn.v_proj.weight": "model-00004-of-00007.safetensors",
134
+ "language_model.model.layers.18.input_layernorm.weight": "model-00005-of-00007.safetensors",
135
+ "language_model.model.layers.18.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
136
+ "language_model.model.layers.18.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
137
+ "language_model.model.layers.18.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
138
+ "language_model.model.layers.18.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
139
+ "language_model.model.layers.18.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
140
+ "language_model.model.layers.18.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
141
+ "language_model.model.layers.18.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
142
+ "language_model.model.layers.18.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
143
+ "language_model.model.layers.18.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
144
+ "language_model.model.layers.18.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
145
+ "language_model.model.layers.18.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
146
+ "language_model.model.layers.19.input_layernorm.weight": "model-00005-of-00007.safetensors",
147
+ "language_model.model.layers.19.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
148
+ "language_model.model.layers.19.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
149
+ "language_model.model.layers.19.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
150
+ "language_model.model.layers.19.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
151
+ "language_model.model.layers.19.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
152
+ "language_model.model.layers.19.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
153
+ "language_model.model.layers.19.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
154
+ "language_model.model.layers.19.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
155
+ "language_model.model.layers.19.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
156
+ "language_model.model.layers.19.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
157
+ "language_model.model.layers.19.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
158
+ "language_model.model.layers.2.input_layernorm.weight": "model-00002-of-00007.safetensors",
159
+ "language_model.model.layers.2.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
160
+ "language_model.model.layers.2.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
161
+ "language_model.model.layers.2.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
162
+ "language_model.model.layers.2.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
163
+ "language_model.model.layers.2.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
164
+ "language_model.model.layers.2.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
165
+ "language_model.model.layers.2.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
166
+ "language_model.model.layers.2.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
167
+ "language_model.model.layers.2.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
168
+ "language_model.model.layers.2.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
169
+ "language_model.model.layers.2.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
170
+ "language_model.model.layers.20.input_layernorm.weight": "model-00005-of-00007.safetensors",
171
+ "language_model.model.layers.20.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
172
+ "language_model.model.layers.20.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
173
+ "language_model.model.layers.20.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
174
+ "language_model.model.layers.20.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
175
+ "language_model.model.layers.20.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
176
+ "language_model.model.layers.20.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
177
+ "language_model.model.layers.20.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
178
+ "language_model.model.layers.20.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
179
+ "language_model.model.layers.20.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
180
+ "language_model.model.layers.20.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
181
+ "language_model.model.layers.20.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
182
+ "language_model.model.layers.21.input_layernorm.weight": "model-00005-of-00007.safetensors",
183
+ "language_model.model.layers.21.mlp.down_proj.weight": "model-00005-of-00007.safetensors",
184
+ "language_model.model.layers.21.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
185
+ "language_model.model.layers.21.mlp.up_proj.weight": "model-00005-of-00007.safetensors",
186
+ "language_model.model.layers.21.post_attention_layernorm.weight": "model-00005-of-00007.safetensors",
187
+ "language_model.model.layers.21.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
188
+ "language_model.model.layers.21.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
189
+ "language_model.model.layers.21.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
190
+ "language_model.model.layers.21.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
191
+ "language_model.model.layers.21.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
192
+ "language_model.model.layers.21.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
193
+ "language_model.model.layers.21.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
194
+ "language_model.model.layers.22.input_layernorm.weight": "model-00006-of-00007.safetensors",
195
+ "language_model.model.layers.22.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
196
+ "language_model.model.layers.22.mlp.gate_proj.weight": "model-00005-of-00007.safetensors",
197
+ "language_model.model.layers.22.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
198
+ "language_model.model.layers.22.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
199
+ "language_model.model.layers.22.self_attn.k_proj.bias": "model-00005-of-00007.safetensors",
200
+ "language_model.model.layers.22.self_attn.k_proj.weight": "model-00005-of-00007.safetensors",
201
+ "language_model.model.layers.22.self_attn.o_proj.weight": "model-00005-of-00007.safetensors",
202
+ "language_model.model.layers.22.self_attn.q_proj.bias": "model-00005-of-00007.safetensors",
203
+ "language_model.model.layers.22.self_attn.q_proj.weight": "model-00005-of-00007.safetensors",
204
+ "language_model.model.layers.22.self_attn.v_proj.bias": "model-00005-of-00007.safetensors",
205
+ "language_model.model.layers.22.self_attn.v_proj.weight": "model-00005-of-00007.safetensors",
206
+ "language_model.model.layers.23.input_layernorm.weight": "model-00006-of-00007.safetensors",
207
+ "language_model.model.layers.23.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
208
+ "language_model.model.layers.23.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
209
+ "language_model.model.layers.23.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
210
+ "language_model.model.layers.23.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
211
+ "language_model.model.layers.23.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
212
+ "language_model.model.layers.23.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
213
+ "language_model.model.layers.23.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
214
+ "language_model.model.layers.23.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
215
+ "language_model.model.layers.23.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
216
+ "language_model.model.layers.23.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
217
+ "language_model.model.layers.23.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
218
+ "language_model.model.layers.24.input_layernorm.weight": "model-00006-of-00007.safetensors",
219
+ "language_model.model.layers.24.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
220
+ "language_model.model.layers.24.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
221
+ "language_model.model.layers.24.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
222
+ "language_model.model.layers.24.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
223
+ "language_model.model.layers.24.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
224
+ "language_model.model.layers.24.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
225
+ "language_model.model.layers.24.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
226
+ "language_model.model.layers.24.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
227
+ "language_model.model.layers.24.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
228
+ "language_model.model.layers.24.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
229
+ "language_model.model.layers.24.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
230
+ "language_model.model.layers.25.input_layernorm.weight": "model-00006-of-00007.safetensors",
231
+ "language_model.model.layers.25.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
232
+ "language_model.model.layers.25.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
233
+ "language_model.model.layers.25.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
234
+ "language_model.model.layers.25.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
235
+ "language_model.model.layers.25.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
236
+ "language_model.model.layers.25.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
237
+ "language_model.model.layers.25.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
238
+ "language_model.model.layers.25.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
239
+ "language_model.model.layers.25.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
240
+ "language_model.model.layers.25.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
241
+ "language_model.model.layers.25.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
242
+ "language_model.model.layers.26.input_layernorm.weight": "model-00006-of-00007.safetensors",
243
+ "language_model.model.layers.26.mlp.down_proj.weight": "model-00006-of-00007.safetensors",
244
+ "language_model.model.layers.26.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
245
+ "language_model.model.layers.26.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
246
+ "language_model.model.layers.26.post_attention_layernorm.weight": "model-00006-of-00007.safetensors",
247
+ "language_model.model.layers.26.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
248
+ "language_model.model.layers.26.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
249
+ "language_model.model.layers.26.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
250
+ "language_model.model.layers.26.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
251
+ "language_model.model.layers.26.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
252
+ "language_model.model.layers.26.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
253
+ "language_model.model.layers.26.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
254
+ "language_model.model.layers.27.input_layernorm.weight": "model-00007-of-00007.safetensors",
255
+ "language_model.model.layers.27.mlp.down_proj.weight": "model-00007-of-00007.safetensors",
256
+ "language_model.model.layers.27.mlp.gate_proj.weight": "model-00006-of-00007.safetensors",
257
+ "language_model.model.layers.27.mlp.up_proj.weight": "model-00006-of-00007.safetensors",
258
+ "language_model.model.layers.27.post_attention_layernorm.weight": "model-00007-of-00007.safetensors",
259
+ "language_model.model.layers.27.self_attn.k_proj.bias": "model-00006-of-00007.safetensors",
260
+ "language_model.model.layers.27.self_attn.k_proj.weight": "model-00006-of-00007.safetensors",
261
+ "language_model.model.layers.27.self_attn.o_proj.weight": "model-00006-of-00007.safetensors",
262
+ "language_model.model.layers.27.self_attn.q_proj.bias": "model-00006-of-00007.safetensors",
263
+ "language_model.model.layers.27.self_attn.q_proj.weight": "model-00006-of-00007.safetensors",
264
+ "language_model.model.layers.27.self_attn.v_proj.bias": "model-00006-of-00007.safetensors",
265
+ "language_model.model.layers.27.self_attn.v_proj.weight": "model-00006-of-00007.safetensors",
266
+ "language_model.model.layers.3.input_layernorm.weight": "model-00002-of-00007.safetensors",
267
+ "language_model.model.layers.3.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
268
+ "language_model.model.layers.3.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
269
+ "language_model.model.layers.3.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
270
+ "language_model.model.layers.3.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
271
+ "language_model.model.layers.3.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
272
+ "language_model.model.layers.3.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
273
+ "language_model.model.layers.3.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
274
+ "language_model.model.layers.3.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
275
+ "language_model.model.layers.3.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
276
+ "language_model.model.layers.3.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
277
+ "language_model.model.layers.3.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
278
+ "language_model.model.layers.4.input_layernorm.weight": "model-00002-of-00007.safetensors",
279
+ "language_model.model.layers.4.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
280
+ "language_model.model.layers.4.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
281
+ "language_model.model.layers.4.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
282
+ "language_model.model.layers.4.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
283
+ "language_model.model.layers.4.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
284
+ "language_model.model.layers.4.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
285
+ "language_model.model.layers.4.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
286
+ "language_model.model.layers.4.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
287
+ "language_model.model.layers.4.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
288
+ "language_model.model.layers.4.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
289
+ "language_model.model.layers.4.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
290
+ "language_model.model.layers.5.input_layernorm.weight": "model-00002-of-00007.safetensors",
291
+ "language_model.model.layers.5.mlp.down_proj.weight": "model-00002-of-00007.safetensors",
292
+ "language_model.model.layers.5.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
293
+ "language_model.model.layers.5.mlp.up_proj.weight": "model-00002-of-00007.safetensors",
294
+ "language_model.model.layers.5.post_attention_layernorm.weight": "model-00002-of-00007.safetensors",
295
+ "language_model.model.layers.5.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
296
+ "language_model.model.layers.5.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
297
+ "language_model.model.layers.5.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
298
+ "language_model.model.layers.5.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
299
+ "language_model.model.layers.5.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
300
+ "language_model.model.layers.5.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
301
+ "language_model.model.layers.5.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
302
+ "language_model.model.layers.6.input_layernorm.weight": "model-00003-of-00007.safetensors",
303
+ "language_model.model.layers.6.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
304
+ "language_model.model.layers.6.mlp.gate_proj.weight": "model-00002-of-00007.safetensors",
305
+ "language_model.model.layers.6.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
306
+ "language_model.model.layers.6.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
307
+ "language_model.model.layers.6.self_attn.k_proj.bias": "model-00002-of-00007.safetensors",
308
+ "language_model.model.layers.6.self_attn.k_proj.weight": "model-00002-of-00007.safetensors",
309
+ "language_model.model.layers.6.self_attn.o_proj.weight": "model-00002-of-00007.safetensors",
310
+ "language_model.model.layers.6.self_attn.q_proj.bias": "model-00002-of-00007.safetensors",
311
+ "language_model.model.layers.6.self_attn.q_proj.weight": "model-00002-of-00007.safetensors",
312
+ "language_model.model.layers.6.self_attn.v_proj.bias": "model-00002-of-00007.safetensors",
313
+ "language_model.model.layers.6.self_attn.v_proj.weight": "model-00002-of-00007.safetensors",
314
+ "language_model.model.layers.7.input_layernorm.weight": "model-00003-of-00007.safetensors",
315
+ "language_model.model.layers.7.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
316
+ "language_model.model.layers.7.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
317
+ "language_model.model.layers.7.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
318
+ "language_model.model.layers.7.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
319
+ "language_model.model.layers.7.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
320
+ "language_model.model.layers.7.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
321
+ "language_model.model.layers.7.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
322
+ "language_model.model.layers.7.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
323
+ "language_model.model.layers.7.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
324
+ "language_model.model.layers.7.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
325
+ "language_model.model.layers.7.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
326
+ "language_model.model.layers.8.input_layernorm.weight": "model-00003-of-00007.safetensors",
327
+ "language_model.model.layers.8.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
328
+ "language_model.model.layers.8.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
329
+ "language_model.model.layers.8.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
330
+ "language_model.model.layers.8.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
331
+ "language_model.model.layers.8.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
332
+ "language_model.model.layers.8.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
333
+ "language_model.model.layers.8.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
334
+ "language_model.model.layers.8.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
335
+ "language_model.model.layers.8.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
336
+ "language_model.model.layers.8.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
337
+ "language_model.model.layers.8.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
338
+ "language_model.model.layers.9.input_layernorm.weight": "model-00003-of-00007.safetensors",
339
+ "language_model.model.layers.9.mlp.down_proj.weight": "model-00003-of-00007.safetensors",
340
+ "language_model.model.layers.9.mlp.gate_proj.weight": "model-00003-of-00007.safetensors",
341
+ "language_model.model.layers.9.mlp.up_proj.weight": "model-00003-of-00007.safetensors",
342
+ "language_model.model.layers.9.post_attention_layernorm.weight": "model-00003-of-00007.safetensors",
343
+ "language_model.model.layers.9.self_attn.k_proj.bias": "model-00003-of-00007.safetensors",
344
+ "language_model.model.layers.9.self_attn.k_proj.weight": "model-00003-of-00007.safetensors",
345
+ "language_model.model.layers.9.self_attn.o_proj.weight": "model-00003-of-00007.safetensors",
346
+ "language_model.model.layers.9.self_attn.q_proj.bias": "model-00003-of-00007.safetensors",
347
+ "language_model.model.layers.9.self_attn.q_proj.weight": "model-00003-of-00007.safetensors",
348
+ "language_model.model.layers.9.self_attn.v_proj.bias": "model-00003-of-00007.safetensors",
349
+ "language_model.model.layers.9.self_attn.v_proj.weight": "model-00003-of-00007.safetensors",
350
+ "language_model.model.norm.weight": "model-00007-of-00007.safetensors",
351
+ "vision_model.high_layer_norm.bias": "model-00001-of-00007.safetensors",
352
+ "vision_model.high_layer_norm.weight": "model-00001-of-00007.safetensors",
353
+ "vision_model.low_layer_norm.bias": "model-00001-of-00007.safetensors",
354
+ "vision_model.low_layer_norm.weight": "model-00001-of-00007.safetensors",
355
+ "vision_model.vision_tower_high.vision_tower.blocks.0.attn.proj.bias": "model-00001-of-00007.safetensors",
356
+ "vision_model.vision_tower_high.vision_tower.blocks.0.attn.proj.weight": "model-00001-of-00007.safetensors",
357
+ "vision_model.vision_tower_high.vision_tower.blocks.0.attn.qkv.bias": "model-00001-of-00007.safetensors",
358
+ "vision_model.vision_tower_high.vision_tower.blocks.0.attn.qkv.weight": "model-00001-of-00007.safetensors",
359
+ "vision_model.vision_tower_high.vision_tower.blocks.0.attn.rel_pos_h": "model-00001-of-00007.safetensors",
360
+ "vision_model.vision_tower_high.vision_tower.blocks.0.attn.rel_pos_w": "model-00001-of-00007.safetensors",
361
+ "vision_model.vision_tower_high.vision_tower.blocks.0.mlp.lin1.bias": "model-00001-of-00007.safetensors",
362
+ "vision_model.vision_tower_high.vision_tower.blocks.0.mlp.lin1.weight": "model-00001-of-00007.safetensors",
363
+ "vision_model.vision_tower_high.vision_tower.blocks.0.mlp.lin2.bias": "model-00001-of-00007.safetensors",
364
+ "vision_model.vision_tower_high.vision_tower.blocks.0.mlp.lin2.weight": "model-00001-of-00007.safetensors",
365
+ "vision_model.vision_tower_high.vision_tower.blocks.0.norm1.bias": "model-00001-of-00007.safetensors",
366
+ "vision_model.vision_tower_high.vision_tower.blocks.0.norm1.weight": "model-00001-of-00007.safetensors",
367
+ "vision_model.vision_tower_high.vision_tower.blocks.0.norm2.bias": "model-00001-of-00007.safetensors",
368
+ "vision_model.vision_tower_high.vision_tower.blocks.0.norm2.weight": "model-00001-of-00007.safetensors",
369
+ "vision_model.vision_tower_high.vision_tower.blocks.1.attn.proj.bias": "model-00001-of-00007.safetensors",
370
+ "vision_model.vision_tower_high.vision_tower.blocks.1.attn.proj.weight": "model-00001-of-00007.safetensors",
371
+ "vision_model.vision_tower_high.vision_tower.blocks.1.attn.qkv.bias": "model-00001-of-00007.safetensors",
372
+ "vision_model.vision_tower_high.vision_tower.blocks.1.attn.qkv.weight": "model-00001-of-00007.safetensors",
373
+ "vision_model.vision_tower_high.vision_tower.blocks.1.attn.rel_pos_h": "model-00001-of-00007.safetensors",
374
+ "vision_model.vision_tower_high.vision_tower.blocks.1.attn.rel_pos_w": "model-00001-of-00007.safetensors",
375
+ "vision_model.vision_tower_high.vision_tower.blocks.1.mlp.lin1.bias": "model-00001-of-00007.safetensors",
376
+ "vision_model.vision_tower_high.vision_tower.blocks.1.mlp.lin1.weight": "model-00001-of-00007.safetensors",
377
+ "vision_model.vision_tower_high.vision_tower.blocks.1.mlp.lin2.bias": "model-00001-of-00007.safetensors",
378
+ "vision_model.vision_tower_high.vision_tower.blocks.1.mlp.lin2.weight": "model-00001-of-00007.safetensors",
379
+ "vision_model.vision_tower_high.vision_tower.blocks.1.norm1.bias": "model-00001-of-00007.safetensors",
380
+ "vision_model.vision_tower_high.vision_tower.blocks.1.norm1.weight": "model-00001-of-00007.safetensors",
381
+ "vision_model.vision_tower_high.vision_tower.blocks.1.norm2.bias": "model-00001-of-00007.safetensors",
382
+ "vision_model.vision_tower_high.vision_tower.blocks.1.norm2.weight": "model-00001-of-00007.safetensors",
383
+ "vision_model.vision_tower_high.vision_tower.blocks.10.attn.proj.bias": "model-00001-of-00007.safetensors",
384
+ "vision_model.vision_tower_high.vision_tower.blocks.10.attn.proj.weight": "model-00001-of-00007.safetensors",
385
+ "vision_model.vision_tower_high.vision_tower.blocks.10.attn.qkv.bias": "model-00001-of-00007.safetensors",
386
+ "vision_model.vision_tower_high.vision_tower.blocks.10.attn.qkv.weight": "model-00001-of-00007.safetensors",
387
+ "vision_model.vision_tower_high.vision_tower.blocks.10.attn.rel_pos_h": "model-00001-of-00007.safetensors",
388
+ "vision_model.vision_tower_high.vision_tower.blocks.10.attn.rel_pos_w": "model-00001-of-00007.safetensors",
389
+ "vision_model.vision_tower_high.vision_tower.blocks.10.mlp.lin1.bias": "model-00001-of-00007.safetensors",
390
+ "vision_model.vision_tower_high.vision_tower.blocks.10.mlp.lin1.weight": "model-00001-of-00007.safetensors",
391
+ "vision_model.vision_tower_high.vision_tower.blocks.10.mlp.lin2.bias": "model-00001-of-00007.safetensors",
392
+ "vision_model.vision_tower_high.vision_tower.blocks.10.mlp.lin2.weight": "model-00001-of-00007.safetensors",
393
+ "vision_model.vision_tower_high.vision_tower.blocks.10.norm1.bias": "model-00001-of-00007.safetensors",
394
+ "vision_model.vision_tower_high.vision_tower.blocks.10.norm1.weight": "model-00001-of-00007.safetensors",
395
+ "vision_model.vision_tower_high.vision_tower.blocks.10.norm2.bias": "model-00001-of-00007.safetensors",
396
+ "vision_model.vision_tower_high.vision_tower.blocks.10.norm2.weight": "model-00001-of-00007.safetensors",
397
+ "vision_model.vision_tower_high.vision_tower.blocks.11.attn.proj.bias": "model-00001-of-00007.safetensors",
398
+ "vision_model.vision_tower_high.vision_tower.blocks.11.attn.proj.weight": "model-00001-of-00007.safetensors",
399
+ "vision_model.vision_tower_high.vision_tower.blocks.11.attn.qkv.bias": "model-00001-of-00007.safetensors",
400
+ "vision_model.vision_tower_high.vision_tower.blocks.11.attn.qkv.weight": "model-00001-of-00007.safetensors",
401
+ "vision_model.vision_tower_high.vision_tower.blocks.11.attn.rel_pos_h": "model-00001-of-00007.safetensors",
402
+ "vision_model.vision_tower_high.vision_tower.blocks.11.attn.rel_pos_w": "model-00001-of-00007.safetensors",
403
+ "vision_model.vision_tower_high.vision_tower.blocks.11.mlp.lin1.bias": "model-00001-of-00007.safetensors",
404
+ "vision_model.vision_tower_high.vision_tower.blocks.11.mlp.lin1.weight": "model-00001-of-00007.safetensors",
405
+ "vision_model.vision_tower_high.vision_tower.blocks.11.mlp.lin2.bias": "model-00001-of-00007.safetensors",
406
+ "vision_model.vision_tower_high.vision_tower.blocks.11.mlp.lin2.weight": "model-00001-of-00007.safetensors",
407
+ "vision_model.vision_tower_high.vision_tower.blocks.11.norm1.bias": "model-00001-of-00007.safetensors",
408
+ "vision_model.vision_tower_high.vision_tower.blocks.11.norm1.weight": "model-00001-of-00007.safetensors",
409
+ "vision_model.vision_tower_high.vision_tower.blocks.11.norm2.bias": "model-00001-of-00007.safetensors",
410
+ "vision_model.vision_tower_high.vision_tower.blocks.11.norm2.weight": "model-00001-of-00007.safetensors",
411
+ "vision_model.vision_tower_high.vision_tower.blocks.2.attn.proj.bias": "model-00001-of-00007.safetensors",
412
+ "vision_model.vision_tower_high.vision_tower.blocks.2.attn.proj.weight": "model-00001-of-00007.safetensors",
413
+ "vision_model.vision_tower_high.vision_tower.blocks.2.attn.qkv.bias": "model-00001-of-00007.safetensors",
414
+ "vision_model.vision_tower_high.vision_tower.blocks.2.attn.qkv.weight": "model-00001-of-00007.safetensors",
415
+ "vision_model.vision_tower_high.vision_tower.blocks.2.attn.rel_pos_h": "model-00001-of-00007.safetensors",
416
+ "vision_model.vision_tower_high.vision_tower.blocks.2.attn.rel_pos_w": "model-00001-of-00007.safetensors",
417
+ "vision_model.vision_tower_high.vision_tower.blocks.2.mlp.lin1.bias": "model-00001-of-00007.safetensors",
418
+ "vision_model.vision_tower_high.vision_tower.blocks.2.mlp.lin1.weight": "model-00001-of-00007.safetensors",
419
+ "vision_model.vision_tower_high.vision_tower.blocks.2.mlp.lin2.bias": "model-00001-of-00007.safetensors",
420
+ "vision_model.vision_tower_high.vision_tower.blocks.2.mlp.lin2.weight": "model-00001-of-00007.safetensors",
421
+ "vision_model.vision_tower_high.vision_tower.blocks.2.norm1.bias": "model-00001-of-00007.safetensors",
422
+ "vision_model.vision_tower_high.vision_tower.blocks.2.norm1.weight": "model-00001-of-00007.safetensors",
423
+ "vision_model.vision_tower_high.vision_tower.blocks.2.norm2.bias": "model-00001-of-00007.safetensors",
424
+ "vision_model.vision_tower_high.vision_tower.blocks.2.norm2.weight": "model-00001-of-00007.safetensors",
425
+ "vision_model.vision_tower_high.vision_tower.blocks.3.attn.proj.bias": "model-00001-of-00007.safetensors",
426
+ "vision_model.vision_tower_high.vision_tower.blocks.3.attn.proj.weight": "model-00001-of-00007.safetensors",
427
+ "vision_model.vision_tower_high.vision_tower.blocks.3.attn.qkv.bias": "model-00001-of-00007.safetensors",
428
+ "vision_model.vision_tower_high.vision_tower.blocks.3.attn.qkv.weight": "model-00001-of-00007.safetensors",
429
+ "vision_model.vision_tower_high.vision_tower.blocks.3.attn.rel_pos_h": "model-00001-of-00007.safetensors",
430
+ "vision_model.vision_tower_high.vision_tower.blocks.3.attn.rel_pos_w": "model-00001-of-00007.safetensors",
431
+ "vision_model.vision_tower_high.vision_tower.blocks.3.mlp.lin1.bias": "model-00001-of-00007.safetensors",
432
+ "vision_model.vision_tower_high.vision_tower.blocks.3.mlp.lin1.weight": "model-00001-of-00007.safetensors",
433
+ "vision_model.vision_tower_high.vision_tower.blocks.3.mlp.lin2.bias": "model-00001-of-00007.safetensors",
434
+ "vision_model.vision_tower_high.vision_tower.blocks.3.mlp.lin2.weight": "model-00001-of-00007.safetensors",
435
+ "vision_model.vision_tower_high.vision_tower.blocks.3.norm1.bias": "model-00001-of-00007.safetensors",
436
+ "vision_model.vision_tower_high.vision_tower.blocks.3.norm1.weight": "model-00001-of-00007.safetensors",
437
+ "vision_model.vision_tower_high.vision_tower.blocks.3.norm2.bias": "model-00001-of-00007.safetensors",
438
+ "vision_model.vision_tower_high.vision_tower.blocks.3.norm2.weight": "model-00001-of-00007.safetensors",
439
+ "vision_model.vision_tower_high.vision_tower.blocks.4.attn.proj.bias": "model-00001-of-00007.safetensors",
440
+ "vision_model.vision_tower_high.vision_tower.blocks.4.attn.proj.weight": "model-00001-of-00007.safetensors",
441
+ "vision_model.vision_tower_high.vision_tower.blocks.4.attn.qkv.bias": "model-00001-of-00007.safetensors",
442
+ "vision_model.vision_tower_high.vision_tower.blocks.4.attn.qkv.weight": "model-00001-of-00007.safetensors",
443
+ "vision_model.vision_tower_high.vision_tower.blocks.4.attn.rel_pos_h": "model-00001-of-00007.safetensors",
444
+ "vision_model.vision_tower_high.vision_tower.blocks.4.attn.rel_pos_w": "model-00001-of-00007.safetensors",
445
+ "vision_model.vision_tower_high.vision_tower.blocks.4.mlp.lin1.bias": "model-00001-of-00007.safetensors",
446
+ "vision_model.vision_tower_high.vision_tower.blocks.4.mlp.lin1.weight": "model-00001-of-00007.safetensors",
447
+ "vision_model.vision_tower_high.vision_tower.blocks.4.mlp.lin2.bias": "model-00001-of-00007.safetensors",
448
+ "vision_model.vision_tower_high.vision_tower.blocks.4.mlp.lin2.weight": "model-00001-of-00007.safetensors",
449
+ "vision_model.vision_tower_high.vision_tower.blocks.4.norm1.bias": "model-00001-of-00007.safetensors",
450
+ "vision_model.vision_tower_high.vision_tower.blocks.4.norm1.weight": "model-00001-of-00007.safetensors",
451
+ "vision_model.vision_tower_high.vision_tower.blocks.4.norm2.bias": "model-00001-of-00007.safetensors",
452
+ "vision_model.vision_tower_high.vision_tower.blocks.4.norm2.weight": "model-00001-of-00007.safetensors",
453
+ "vision_model.vision_tower_high.vision_tower.blocks.5.attn.proj.bias": "model-00001-of-00007.safetensors",
454
+ "vision_model.vision_tower_high.vision_tower.blocks.5.attn.proj.weight": "model-00001-of-00007.safetensors",
455
+ "vision_model.vision_tower_high.vision_tower.blocks.5.attn.qkv.bias": "model-00001-of-00007.safetensors",
456
+ "vision_model.vision_tower_high.vision_tower.blocks.5.attn.qkv.weight": "model-00001-of-00007.safetensors",
457
+ "vision_model.vision_tower_high.vision_tower.blocks.5.attn.rel_pos_h": "model-00001-of-00007.safetensors",
458
+ "vision_model.vision_tower_high.vision_tower.blocks.5.attn.rel_pos_w": "model-00001-of-00007.safetensors",
459
+ "vision_model.vision_tower_high.vision_tower.blocks.5.mlp.lin1.bias": "model-00001-of-00007.safetensors",
460
+ "vision_model.vision_tower_high.vision_tower.blocks.5.mlp.lin1.weight": "model-00001-of-00007.safetensors",
461
+ "vision_model.vision_tower_high.vision_tower.blocks.5.mlp.lin2.bias": "model-00001-of-00007.safetensors",
462
+ "vision_model.vision_tower_high.vision_tower.blocks.5.mlp.lin2.weight": "model-00001-of-00007.safetensors",
463
+ "vision_model.vision_tower_high.vision_tower.blocks.5.norm1.bias": "model-00001-of-00007.safetensors",
464
+ "vision_model.vision_tower_high.vision_tower.blocks.5.norm1.weight": "model-00001-of-00007.safetensors",
465
+ "vision_model.vision_tower_high.vision_tower.blocks.5.norm2.bias": "model-00001-of-00007.safetensors",
466
+ "vision_model.vision_tower_high.vision_tower.blocks.5.norm2.weight": "model-00001-of-00007.safetensors",
467
+ "vision_model.vision_tower_high.vision_tower.blocks.6.attn.proj.bias": "model-00001-of-00007.safetensors",
468
+ "vision_model.vision_tower_high.vision_tower.blocks.6.attn.proj.weight": "model-00001-of-00007.safetensors",
469
+ "vision_model.vision_tower_high.vision_tower.blocks.6.attn.qkv.bias": "model-00001-of-00007.safetensors",
470
+ "vision_model.vision_tower_high.vision_tower.blocks.6.attn.qkv.weight": "model-00001-of-00007.safetensors",
471
+ "vision_model.vision_tower_high.vision_tower.blocks.6.attn.rel_pos_h": "model-00001-of-00007.safetensors",
472
+ "vision_model.vision_tower_high.vision_tower.blocks.6.attn.rel_pos_w": "model-00001-of-00007.safetensors",
473
+ "vision_model.vision_tower_high.vision_tower.blocks.6.mlp.lin1.bias": "model-00001-of-00007.safetensors",
474
+ "vision_model.vision_tower_high.vision_tower.blocks.6.mlp.lin1.weight": "model-00001-of-00007.safetensors",
475
+ "vision_model.vision_tower_high.vision_tower.blocks.6.mlp.lin2.bias": "model-00001-of-00007.safetensors",
476
+ "vision_model.vision_tower_high.vision_tower.blocks.6.mlp.lin2.weight": "model-00001-of-00007.safetensors",
477
+ "vision_model.vision_tower_high.vision_tower.blocks.6.norm1.bias": "model-00001-of-00007.safetensors",
478
+ "vision_model.vision_tower_high.vision_tower.blocks.6.norm1.weight": "model-00001-of-00007.safetensors",
479
+ "vision_model.vision_tower_high.vision_tower.blocks.6.norm2.bias": "model-00001-of-00007.safetensors",
480
+ "vision_model.vision_tower_high.vision_tower.blocks.6.norm2.weight": "model-00001-of-00007.safetensors",
481
+ "vision_model.vision_tower_high.vision_tower.blocks.7.attn.proj.bias": "model-00001-of-00007.safetensors",
482
+ "vision_model.vision_tower_high.vision_tower.blocks.7.attn.proj.weight": "model-00001-of-00007.safetensors",
483
+ "vision_model.vision_tower_high.vision_tower.blocks.7.attn.qkv.bias": "model-00001-of-00007.safetensors",
484
+ "vision_model.vision_tower_high.vision_tower.blocks.7.attn.qkv.weight": "model-00001-of-00007.safetensors",
485
+ "vision_model.vision_tower_high.vision_tower.blocks.7.attn.rel_pos_h": "model-00001-of-00007.safetensors",
486
+ "vision_model.vision_tower_high.vision_tower.blocks.7.attn.rel_pos_w": "model-00001-of-00007.safetensors",
487
+ "vision_model.vision_tower_high.vision_tower.blocks.7.mlp.lin1.bias": "model-00001-of-00007.safetensors",
488
+ "vision_model.vision_tower_high.vision_tower.blocks.7.mlp.lin1.weight": "model-00001-of-00007.safetensors",
489
+ "vision_model.vision_tower_high.vision_tower.blocks.7.mlp.lin2.bias": "model-00001-of-00007.safetensors",
490
+ "vision_model.vision_tower_high.vision_tower.blocks.7.mlp.lin2.weight": "model-00001-of-00007.safetensors",
491
+ "vision_model.vision_tower_high.vision_tower.blocks.7.norm1.bias": "model-00001-of-00007.safetensors",
492
+ "vision_model.vision_tower_high.vision_tower.blocks.7.norm1.weight": "model-00001-of-00007.safetensors",
493
+ "vision_model.vision_tower_high.vision_tower.blocks.7.norm2.bias": "model-00001-of-00007.safetensors",
494
+ "vision_model.vision_tower_high.vision_tower.blocks.7.norm2.weight": "model-00001-of-00007.safetensors",
495
+ "vision_model.vision_tower_high.vision_tower.blocks.8.attn.proj.bias": "model-00001-of-00007.safetensors",
496
+ "vision_model.vision_tower_high.vision_tower.blocks.8.attn.proj.weight": "model-00001-of-00007.safetensors",
497
+ "vision_model.vision_tower_high.vision_tower.blocks.8.attn.qkv.bias": "model-00001-of-00007.safetensors",
498
+ "vision_model.vision_tower_high.vision_tower.blocks.8.attn.qkv.weight": "model-00001-of-00007.safetensors",
499
+ "vision_model.vision_tower_high.vision_tower.blocks.8.attn.rel_pos_h": "model-00001-of-00007.safetensors",
500
+ "vision_model.vision_tower_high.vision_tower.blocks.8.attn.rel_pos_w": "model-00001-of-00007.safetensors",
501
+ "vision_model.vision_tower_high.vision_tower.blocks.8.mlp.lin1.bias": "model-00001-of-00007.safetensors",
502
+ "vision_model.vision_tower_high.vision_tower.blocks.8.mlp.lin1.weight": "model-00001-of-00007.safetensors",
503
+ "vision_model.vision_tower_high.vision_tower.blocks.8.mlp.lin2.bias": "model-00001-of-00007.safetensors",
504
+ "vision_model.vision_tower_high.vision_tower.blocks.8.mlp.lin2.weight": "model-00001-of-00007.safetensors",
505
+ "vision_model.vision_tower_high.vision_tower.blocks.8.norm1.bias": "model-00001-of-00007.safetensors",
506
+ "vision_model.vision_tower_high.vision_tower.blocks.8.norm1.weight": "model-00001-of-00007.safetensors",
507
+ "vision_model.vision_tower_high.vision_tower.blocks.8.norm2.bias": "model-00001-of-00007.safetensors",
508
+ "vision_model.vision_tower_high.vision_tower.blocks.8.norm2.weight": "model-00001-of-00007.safetensors",
509
+ "vision_model.vision_tower_high.vision_tower.blocks.9.attn.proj.bias": "model-00001-of-00007.safetensors",
510
+ "vision_model.vision_tower_high.vision_tower.blocks.9.attn.proj.weight": "model-00001-of-00007.safetensors",
511
+ "vision_model.vision_tower_high.vision_tower.blocks.9.attn.qkv.bias": "model-00001-of-00007.safetensors",
512
+ "vision_model.vision_tower_high.vision_tower.blocks.9.attn.qkv.weight": "model-00001-of-00007.safetensors",
513
+ "vision_model.vision_tower_high.vision_tower.blocks.9.attn.rel_pos_h": "model-00001-of-00007.safetensors",
514
+ "vision_model.vision_tower_high.vision_tower.blocks.9.attn.rel_pos_w": "model-00001-of-00007.safetensors",
515
+ "vision_model.vision_tower_high.vision_tower.blocks.9.mlp.lin1.bias": "model-00001-of-00007.safetensors",
516
+ "vision_model.vision_tower_high.vision_tower.blocks.9.mlp.lin1.weight": "model-00001-of-00007.safetensors",
517
+ "vision_model.vision_tower_high.vision_tower.blocks.9.mlp.lin2.bias": "model-00001-of-00007.safetensors",
518
+ "vision_model.vision_tower_high.vision_tower.blocks.9.mlp.lin2.weight": "model-00001-of-00007.safetensors",
519
+ "vision_model.vision_tower_high.vision_tower.blocks.9.norm1.bias": "model-00001-of-00007.safetensors",
520
+ "vision_model.vision_tower_high.vision_tower.blocks.9.norm1.weight": "model-00001-of-00007.safetensors",
521
+ "vision_model.vision_tower_high.vision_tower.blocks.9.norm2.bias": "model-00001-of-00007.safetensors",
522
+ "vision_model.vision_tower_high.vision_tower.blocks.9.norm2.weight": "model-00001-of-00007.safetensors",
523
+ "vision_model.vision_tower_high.vision_tower.downsamples.0.weight": "model-00001-of-00007.safetensors",
524
+ "vision_model.vision_tower_high.vision_tower.downsamples.1.weight": "model-00001-of-00007.safetensors",
525
+ "vision_model.vision_tower_high.vision_tower.hd_alpha_downsamples": "model-00001-of-00007.safetensors",
526
+ "vision_model.vision_tower_high.vision_tower.neck.0.weight": "model-00001-of-00007.safetensors",
527
+ "vision_model.vision_tower_high.vision_tower.neck.1.bias": "model-00001-of-00007.safetensors",
528
+ "vision_model.vision_tower_high.vision_tower.neck.1.weight": "model-00001-of-00007.safetensors",
529
+ "vision_model.vision_tower_high.vision_tower.neck.2.weight": "model-00001-of-00007.safetensors",
530
+ "vision_model.vision_tower_high.vision_tower.neck.3.bias": "model-00001-of-00007.safetensors",
531
+ "vision_model.vision_tower_high.vision_tower.neck.3.weight": "model-00001-of-00007.safetensors",
532
+ "vision_model.vision_tower_high.vision_tower.neck_hd.0.weight": "model-00001-of-00007.safetensors",
533
+ "vision_model.vision_tower_high.vision_tower.neck_hd.1.bias": "model-00001-of-00007.safetensors",
534
+ "vision_model.vision_tower_high.vision_tower.neck_hd.1.weight": "model-00001-of-00007.safetensors",
535
+ "vision_model.vision_tower_high.vision_tower.neck_hd.2.weight": "model-00001-of-00007.safetensors",
536
+ "vision_model.vision_tower_high.vision_tower.neck_hd.3.bias": "model-00001-of-00007.safetensors",
537
+ "vision_model.vision_tower_high.vision_tower.neck_hd.3.weight": "model-00001-of-00007.safetensors",
538
+ "vision_model.vision_tower_high.vision_tower.patch_embed.proj.bias": "model-00001-of-00007.safetensors",
539
+ "vision_model.vision_tower_high.vision_tower.patch_embed.proj.weight": "model-00001-of-00007.safetensors",
540
+ "vision_model.vision_tower_high.vision_tower.pos_embed": "model-00001-of-00007.safetensors",
541
+ "vision_model.vision_tower_low.vision_tower.attn_pool.kv.bias": "model-00001-of-00007.safetensors",
542
+ "vision_model.vision_tower_low.vision_tower.attn_pool.kv.weight": "model-00001-of-00007.safetensors",
543
+ "vision_model.vision_tower_low.vision_tower.attn_pool.latent": "model-00001-of-00007.safetensors",
544
+ "vision_model.vision_tower_low.vision_tower.attn_pool.mlp.fc1.bias": "model-00001-of-00007.safetensors",
545
+ "vision_model.vision_tower_low.vision_tower.attn_pool.mlp.fc1.weight": "model-00001-of-00007.safetensors",
546
+ "vision_model.vision_tower_low.vision_tower.attn_pool.mlp.fc2.bias": "model-00001-of-00007.safetensors",
547
+ "vision_model.vision_tower_low.vision_tower.attn_pool.mlp.fc2.weight": "model-00001-of-00007.safetensors",
548
+ "vision_model.vision_tower_low.vision_tower.attn_pool.norm.bias": "model-00001-of-00007.safetensors",
549
+ "vision_model.vision_tower_low.vision_tower.attn_pool.norm.weight": "model-00001-of-00007.safetensors",
550
+ "vision_model.vision_tower_low.vision_tower.attn_pool.proj.bias": "model-00001-of-00007.safetensors",
551
+ "vision_model.vision_tower_low.vision_tower.attn_pool.proj.weight": "model-00001-of-00007.safetensors",
552
+ "vision_model.vision_tower_low.vision_tower.attn_pool.q.bias": "model-00001-of-00007.safetensors",
553
+ "vision_model.vision_tower_low.vision_tower.attn_pool.q.weight": "model-00001-of-00007.safetensors",
554
+ "vision_model.vision_tower_low.vision_tower.blocks.0.attn.proj.bias": "model-00001-of-00007.safetensors",
555
+ "vision_model.vision_tower_low.vision_tower.blocks.0.attn.proj.weight": "model-00001-of-00007.safetensors",
556
+ "vision_model.vision_tower_low.vision_tower.blocks.0.attn.qkv.bias": "model-00001-of-00007.safetensors",
557
+ "vision_model.vision_tower_low.vision_tower.blocks.0.attn.qkv.weight": "model-00001-of-00007.safetensors",
558
+ "vision_model.vision_tower_low.vision_tower.blocks.0.mlp.fc1.bias": "model-00001-of-00007.safetensors",
559
+ "vision_model.vision_tower_low.vision_tower.blocks.0.mlp.fc1.weight": "model-00001-of-00007.safetensors",
560
+ "vision_model.vision_tower_low.vision_tower.blocks.0.mlp.fc2.bias": "model-00001-of-00007.safetensors",
561
+ "vision_model.vision_tower_low.vision_tower.blocks.0.mlp.fc2.weight": "model-00001-of-00007.safetensors",
562
+ "vision_model.vision_tower_low.vision_tower.blocks.0.norm1.bias": "model-00001-of-00007.safetensors",
563
+ "vision_model.vision_tower_low.vision_tower.blocks.0.norm1.weight": "model-00001-of-00007.safetensors",
564
+ "vision_model.vision_tower_low.vision_tower.blocks.0.norm2.bias": "model-00001-of-00007.safetensors",
565
+ "vision_model.vision_tower_low.vision_tower.blocks.0.norm2.weight": "model-00001-of-00007.safetensors",
566
+ "vision_model.vision_tower_low.vision_tower.blocks.1.attn.proj.bias": "model-00001-of-00007.safetensors",
567
+ "vision_model.vision_tower_low.vision_tower.blocks.1.attn.proj.weight": "model-00001-of-00007.safetensors",
568
+ "vision_model.vision_tower_low.vision_tower.blocks.1.attn.qkv.bias": "model-00001-of-00007.safetensors",
569
+ "vision_model.vision_tower_low.vision_tower.blocks.1.attn.qkv.weight": "model-00001-of-00007.safetensors",
570
+ "vision_model.vision_tower_low.vision_tower.blocks.1.mlp.fc1.bias": "model-00001-of-00007.safetensors",
571
+ "vision_model.vision_tower_low.vision_tower.blocks.1.mlp.fc1.weight": "model-00001-of-00007.safetensors",
572
+ "vision_model.vision_tower_low.vision_tower.blocks.1.mlp.fc2.bias": "model-00001-of-00007.safetensors",
573
+ "vision_model.vision_tower_low.vision_tower.blocks.1.mlp.fc2.weight": "model-00001-of-00007.safetensors",
574
+ "vision_model.vision_tower_low.vision_tower.blocks.1.norm1.bias": "model-00001-of-00007.safetensors",
575
+ "vision_model.vision_tower_low.vision_tower.blocks.1.norm1.weight": "model-00001-of-00007.safetensors",
576
+ "vision_model.vision_tower_low.vision_tower.blocks.1.norm2.bias": "model-00001-of-00007.safetensors",
577
+ "vision_model.vision_tower_low.vision_tower.blocks.1.norm2.weight": "model-00001-of-00007.safetensors",
578
+ "vision_model.vision_tower_low.vision_tower.blocks.10.attn.proj.bias": "model-00001-of-00007.safetensors",
579
+ "vision_model.vision_tower_low.vision_tower.blocks.10.attn.proj.weight": "model-00001-of-00007.safetensors",
580
+ "vision_model.vision_tower_low.vision_tower.blocks.10.attn.qkv.bias": "model-00001-of-00007.safetensors",
581
+ "vision_model.vision_tower_low.vision_tower.blocks.10.attn.qkv.weight": "model-00001-of-00007.safetensors",
582
+ "vision_model.vision_tower_low.vision_tower.blocks.10.mlp.fc1.bias": "model-00001-of-00007.safetensors",
583
+ "vision_model.vision_tower_low.vision_tower.blocks.10.mlp.fc1.weight": "model-00001-of-00007.safetensors",
584
+ "vision_model.vision_tower_low.vision_tower.blocks.10.mlp.fc2.bias": "model-00001-of-00007.safetensors",
585
+ "vision_model.vision_tower_low.vision_tower.blocks.10.mlp.fc2.weight": "model-00001-of-00007.safetensors",
586
+ "vision_model.vision_tower_low.vision_tower.blocks.10.norm1.bias": "model-00001-of-00007.safetensors",
587
+ "vision_model.vision_tower_low.vision_tower.blocks.10.norm1.weight": "model-00001-of-00007.safetensors",
588
+ "vision_model.vision_tower_low.vision_tower.blocks.10.norm2.bias": "model-00001-of-00007.safetensors",
589
+ "vision_model.vision_tower_low.vision_tower.blocks.10.norm2.weight": "model-00001-of-00007.safetensors",
590
+ "vision_model.vision_tower_low.vision_tower.blocks.11.attn.proj.bias": "model-00001-of-00007.safetensors",
591
+ "vision_model.vision_tower_low.vision_tower.blocks.11.attn.proj.weight": "model-00001-of-00007.safetensors",
592
+ "vision_model.vision_tower_low.vision_tower.blocks.11.attn.qkv.bias": "model-00001-of-00007.safetensors",
593
+ "vision_model.vision_tower_low.vision_tower.blocks.11.attn.qkv.weight": "model-00001-of-00007.safetensors",
594
+ "vision_model.vision_tower_low.vision_tower.blocks.11.mlp.fc1.bias": "model-00001-of-00007.safetensors",
595
+ "vision_model.vision_tower_low.vision_tower.blocks.11.mlp.fc1.weight": "model-00001-of-00007.safetensors",
596
+ "vision_model.vision_tower_low.vision_tower.blocks.11.mlp.fc2.bias": "model-00001-of-00007.safetensors",
597
+ "vision_model.vision_tower_low.vision_tower.blocks.11.mlp.fc2.weight": "model-00001-of-00007.safetensors",
598
+ "vision_model.vision_tower_low.vision_tower.blocks.11.norm1.bias": "model-00001-of-00007.safetensors",
599
+ "vision_model.vision_tower_low.vision_tower.blocks.11.norm1.weight": "model-00001-of-00007.safetensors",
600
+ "vision_model.vision_tower_low.vision_tower.blocks.11.norm2.bias": "model-00001-of-00007.safetensors",
601
+ "vision_model.vision_tower_low.vision_tower.blocks.11.norm2.weight": "model-00001-of-00007.safetensors",
602
+ "vision_model.vision_tower_low.vision_tower.blocks.12.attn.proj.bias": "model-00001-of-00007.safetensors",
603
+ "vision_model.vision_tower_low.vision_tower.blocks.12.attn.proj.weight": "model-00001-of-00007.safetensors",
604
+ "vision_model.vision_tower_low.vision_tower.blocks.12.attn.qkv.bias": "model-00001-of-00007.safetensors",
605
+ "vision_model.vision_tower_low.vision_tower.blocks.12.attn.qkv.weight": "model-00001-of-00007.safetensors",
606
+ "vision_model.vision_tower_low.vision_tower.blocks.12.mlp.fc1.bias": "model-00001-of-00007.safetensors",
607
+ "vision_model.vision_tower_low.vision_tower.blocks.12.mlp.fc1.weight": "model-00001-of-00007.safetensors",
608
+ "vision_model.vision_tower_low.vision_tower.blocks.12.mlp.fc2.bias": "model-00001-of-00007.safetensors",
609
+ "vision_model.vision_tower_low.vision_tower.blocks.12.mlp.fc2.weight": "model-00001-of-00007.safetensors",
610
+ "vision_model.vision_tower_low.vision_tower.blocks.12.norm1.bias": "model-00001-of-00007.safetensors",
611
+ "vision_model.vision_tower_low.vision_tower.blocks.12.norm1.weight": "model-00001-of-00007.safetensors",
612
+ "vision_model.vision_tower_low.vision_tower.blocks.12.norm2.bias": "model-00001-of-00007.safetensors",
613
+ "vision_model.vision_tower_low.vision_tower.blocks.12.norm2.weight": "model-00001-of-00007.safetensors",
614
+ "vision_model.vision_tower_low.vision_tower.blocks.13.attn.proj.bias": "model-00001-of-00007.safetensors",
615
+ "vision_model.vision_tower_low.vision_tower.blocks.13.attn.proj.weight": "model-00001-of-00007.safetensors",
616
+ "vision_model.vision_tower_low.vision_tower.blocks.13.attn.qkv.bias": "model-00001-of-00007.safetensors",
617
+ "vision_model.vision_tower_low.vision_tower.blocks.13.attn.qkv.weight": "model-00001-of-00007.safetensors",
618
+ "vision_model.vision_tower_low.vision_tower.blocks.13.mlp.fc1.bias": "model-00001-of-00007.safetensors",
619
+ "vision_model.vision_tower_low.vision_tower.blocks.13.mlp.fc1.weight": "model-00001-of-00007.safetensors",
620
+ "vision_model.vision_tower_low.vision_tower.blocks.13.mlp.fc2.bias": "model-00001-of-00007.safetensors",
621
+ "vision_model.vision_tower_low.vision_tower.blocks.13.mlp.fc2.weight": "model-00001-of-00007.safetensors",
622
+ "vision_model.vision_tower_low.vision_tower.blocks.13.norm1.bias": "model-00001-of-00007.safetensors",
623
+ "vision_model.vision_tower_low.vision_tower.blocks.13.norm1.weight": "model-00001-of-00007.safetensors",
624
+ "vision_model.vision_tower_low.vision_tower.blocks.13.norm2.bias": "model-00001-of-00007.safetensors",
625
+ "vision_model.vision_tower_low.vision_tower.blocks.13.norm2.weight": "model-00001-of-00007.safetensors",
626
+ "vision_model.vision_tower_low.vision_tower.blocks.14.attn.proj.bias": "model-00001-of-00007.safetensors",
627
+ "vision_model.vision_tower_low.vision_tower.blocks.14.attn.proj.weight": "model-00001-of-00007.safetensors",
628
+ "vision_model.vision_tower_low.vision_tower.blocks.14.attn.qkv.bias": "model-00001-of-00007.safetensors",
629
+ "vision_model.vision_tower_low.vision_tower.blocks.14.attn.qkv.weight": "model-00001-of-00007.safetensors",
630
+ "vision_model.vision_tower_low.vision_tower.blocks.14.mlp.fc1.bias": "model-00001-of-00007.safetensors",
631
+ "vision_model.vision_tower_low.vision_tower.blocks.14.mlp.fc1.weight": "model-00001-of-00007.safetensors",
632
+ "vision_model.vision_tower_low.vision_tower.blocks.14.mlp.fc2.bias": "model-00001-of-00007.safetensors",
633
+ "vision_model.vision_tower_low.vision_tower.blocks.14.mlp.fc2.weight": "model-00001-of-00007.safetensors",
634
+ "vision_model.vision_tower_low.vision_tower.blocks.14.norm1.bias": "model-00001-of-00007.safetensors",
635
+ "vision_model.vision_tower_low.vision_tower.blocks.14.norm1.weight": "model-00001-of-00007.safetensors",
636
+ "vision_model.vision_tower_low.vision_tower.blocks.14.norm2.bias": "model-00001-of-00007.safetensors",
637
+ "vision_model.vision_tower_low.vision_tower.blocks.14.norm2.weight": "model-00001-of-00007.safetensors",
638
+ "vision_model.vision_tower_low.vision_tower.blocks.15.attn.proj.bias": "model-00001-of-00007.safetensors",
639
+ "vision_model.vision_tower_low.vision_tower.blocks.15.attn.proj.weight": "model-00001-of-00007.safetensors",
640
+ "vision_model.vision_tower_low.vision_tower.blocks.15.attn.qkv.bias": "model-00001-of-00007.safetensors",
641
+ "vision_model.vision_tower_low.vision_tower.blocks.15.attn.qkv.weight": "model-00001-of-00007.safetensors",
642
+ "vision_model.vision_tower_low.vision_tower.blocks.15.mlp.fc1.bias": "model-00001-of-00007.safetensors",
643
+ "vision_model.vision_tower_low.vision_tower.blocks.15.mlp.fc1.weight": "model-00001-of-00007.safetensors",
644
+ "vision_model.vision_tower_low.vision_tower.blocks.15.mlp.fc2.bias": "model-00001-of-00007.safetensors",
645
+ "vision_model.vision_tower_low.vision_tower.blocks.15.mlp.fc2.weight": "model-00001-of-00007.safetensors",
646
+ "vision_model.vision_tower_low.vision_tower.blocks.15.norm1.bias": "model-00001-of-00007.safetensors",
647
+ "vision_model.vision_tower_low.vision_tower.blocks.15.norm1.weight": "model-00001-of-00007.safetensors",
648
+ "vision_model.vision_tower_low.vision_tower.blocks.15.norm2.bias": "model-00001-of-00007.safetensors",
649
+ "vision_model.vision_tower_low.vision_tower.blocks.15.norm2.weight": "model-00001-of-00007.safetensors",
650
+ "vision_model.vision_tower_low.vision_tower.blocks.16.attn.proj.bias": "model-00001-of-00007.safetensors",
651
+ "vision_model.vision_tower_low.vision_tower.blocks.16.attn.proj.weight": "model-00001-of-00007.safetensors",
652
+ "vision_model.vision_tower_low.vision_tower.blocks.16.attn.qkv.bias": "model-00001-of-00007.safetensors",
653
+ "vision_model.vision_tower_low.vision_tower.blocks.16.attn.qkv.weight": "model-00001-of-00007.safetensors",
654
+ "vision_model.vision_tower_low.vision_tower.blocks.16.mlp.fc1.bias": "model-00001-of-00007.safetensors",
655
+ "vision_model.vision_tower_low.vision_tower.blocks.16.mlp.fc1.weight": "model-00001-of-00007.safetensors",
656
+ "vision_model.vision_tower_low.vision_tower.blocks.16.mlp.fc2.bias": "model-00001-of-00007.safetensors",
657
+ "vision_model.vision_tower_low.vision_tower.blocks.16.mlp.fc2.weight": "model-00001-of-00007.safetensors",
658
+ "vision_model.vision_tower_low.vision_tower.blocks.16.norm1.bias": "model-00001-of-00007.safetensors",
659
+ "vision_model.vision_tower_low.vision_tower.blocks.16.norm1.weight": "model-00001-of-00007.safetensors",
660
+ "vision_model.vision_tower_low.vision_tower.blocks.16.norm2.bias": "model-00001-of-00007.safetensors",
661
+ "vision_model.vision_tower_low.vision_tower.blocks.16.norm2.weight": "model-00001-of-00007.safetensors",
662
+ "vision_model.vision_tower_low.vision_tower.blocks.17.attn.proj.bias": "model-00001-of-00007.safetensors",
663
+ "vision_model.vision_tower_low.vision_tower.blocks.17.attn.proj.weight": "model-00001-of-00007.safetensors",
664
+ "vision_model.vision_tower_low.vision_tower.blocks.17.attn.qkv.bias": "model-00001-of-00007.safetensors",
665
+ "vision_model.vision_tower_low.vision_tower.blocks.17.attn.qkv.weight": "model-00001-of-00007.safetensors",
666
+ "vision_model.vision_tower_low.vision_tower.blocks.17.mlp.fc1.bias": "model-00001-of-00007.safetensors",
667
+ "vision_model.vision_tower_low.vision_tower.blocks.17.mlp.fc1.weight": "model-00001-of-00007.safetensors",
668
+ "vision_model.vision_tower_low.vision_tower.blocks.17.mlp.fc2.bias": "model-00001-of-00007.safetensors",
669
+ "vision_model.vision_tower_low.vision_tower.blocks.17.mlp.fc2.weight": "model-00001-of-00007.safetensors",
670
+ "vision_model.vision_tower_low.vision_tower.blocks.17.norm1.bias": "model-00001-of-00007.safetensors",
671
+ "vision_model.vision_tower_low.vision_tower.blocks.17.norm1.weight": "model-00001-of-00007.safetensors",
672
+ "vision_model.vision_tower_low.vision_tower.blocks.17.norm2.bias": "model-00001-of-00007.safetensors",
673
+ "vision_model.vision_tower_low.vision_tower.blocks.17.norm2.weight": "model-00001-of-00007.safetensors",
674
+ "vision_model.vision_tower_low.vision_tower.blocks.18.attn.proj.bias": "model-00001-of-00007.safetensors",
675
+ "vision_model.vision_tower_low.vision_tower.blocks.18.attn.proj.weight": "model-00001-of-00007.safetensors",
676
+ "vision_model.vision_tower_low.vision_tower.blocks.18.attn.qkv.bias": "model-00001-of-00007.safetensors",
677
+ "vision_model.vision_tower_low.vision_tower.blocks.18.attn.qkv.weight": "model-00001-of-00007.safetensors",
678
+ "vision_model.vision_tower_low.vision_tower.blocks.18.mlp.fc1.bias": "model-00001-of-00007.safetensors",
679
+ "vision_model.vision_tower_low.vision_tower.blocks.18.mlp.fc1.weight": "model-00001-of-00007.safetensors",
680
+ "vision_model.vision_tower_low.vision_tower.blocks.18.mlp.fc2.bias": "model-00001-of-00007.safetensors",
681
+ "vision_model.vision_tower_low.vision_tower.blocks.18.mlp.fc2.weight": "model-00001-of-00007.safetensors",
682
+ "vision_model.vision_tower_low.vision_tower.blocks.18.norm1.bias": "model-00001-of-00007.safetensors",
683
+ "vision_model.vision_tower_low.vision_tower.blocks.18.norm1.weight": "model-00001-of-00007.safetensors",
684
+ "vision_model.vision_tower_low.vision_tower.blocks.18.norm2.bias": "model-00001-of-00007.safetensors",
685
+ "vision_model.vision_tower_low.vision_tower.blocks.18.norm2.weight": "model-00001-of-00007.safetensors",
686
+ "vision_model.vision_tower_low.vision_tower.blocks.19.attn.proj.bias": "model-00001-of-00007.safetensors",
687
+ "vision_model.vision_tower_low.vision_tower.blocks.19.attn.proj.weight": "model-00001-of-00007.safetensors",
688
+ "vision_model.vision_tower_low.vision_tower.blocks.19.attn.qkv.bias": "model-00001-of-00007.safetensors",
689
+ "vision_model.vision_tower_low.vision_tower.blocks.19.attn.qkv.weight": "model-00001-of-00007.safetensors",
690
+ "vision_model.vision_tower_low.vision_tower.blocks.19.mlp.fc1.bias": "model-00001-of-00007.safetensors",
691
+ "vision_model.vision_tower_low.vision_tower.blocks.19.mlp.fc1.weight": "model-00001-of-00007.safetensors",
692
+ "vision_model.vision_tower_low.vision_tower.blocks.19.mlp.fc2.bias": "model-00001-of-00007.safetensors",
693
+ "vision_model.vision_tower_low.vision_tower.blocks.19.mlp.fc2.weight": "model-00001-of-00007.safetensors",
694
+ "vision_model.vision_tower_low.vision_tower.blocks.19.norm1.bias": "model-00001-of-00007.safetensors",
695
+ "vision_model.vision_tower_low.vision_tower.blocks.19.norm1.weight": "model-00001-of-00007.safetensors",
696
+ "vision_model.vision_tower_low.vision_tower.blocks.19.norm2.bias": "model-00001-of-00007.safetensors",
697
+ "vision_model.vision_tower_low.vision_tower.blocks.19.norm2.weight": "model-00001-of-00007.safetensors",
698
+ "vision_model.vision_tower_low.vision_tower.blocks.2.attn.proj.bias": "model-00001-of-00007.safetensors",
699
+ "vision_model.vision_tower_low.vision_tower.blocks.2.attn.proj.weight": "model-00001-of-00007.safetensors",
700
+ "vision_model.vision_tower_low.vision_tower.blocks.2.attn.qkv.bias": "model-00001-of-00007.safetensors",
701
+ "vision_model.vision_tower_low.vision_tower.blocks.2.attn.qkv.weight": "model-00001-of-00007.safetensors",
702
+ "vision_model.vision_tower_low.vision_tower.blocks.2.mlp.fc1.bias": "model-00001-of-00007.safetensors",
703
+ "vision_model.vision_tower_low.vision_tower.blocks.2.mlp.fc1.weight": "model-00001-of-00007.safetensors",
704
+ "vision_model.vision_tower_low.vision_tower.blocks.2.mlp.fc2.bias": "model-00001-of-00007.safetensors",
705
+ "vision_model.vision_tower_low.vision_tower.blocks.2.mlp.fc2.weight": "model-00001-of-00007.safetensors",
706
+ "vision_model.vision_tower_low.vision_tower.blocks.2.norm1.bias": "model-00001-of-00007.safetensors",
707
+ "vision_model.vision_tower_low.vision_tower.blocks.2.norm1.weight": "model-00001-of-00007.safetensors",
708
+ "vision_model.vision_tower_low.vision_tower.blocks.2.norm2.bias": "model-00001-of-00007.safetensors",
709
+ "vision_model.vision_tower_low.vision_tower.blocks.2.norm2.weight": "model-00001-of-00007.safetensors",
710
+ "vision_model.vision_tower_low.vision_tower.blocks.20.attn.proj.bias": "model-00001-of-00007.safetensors",
711
+ "vision_model.vision_tower_low.vision_tower.blocks.20.attn.proj.weight": "model-00001-of-00007.safetensors",
712
+ "vision_model.vision_tower_low.vision_tower.blocks.20.attn.qkv.bias": "model-00001-of-00007.safetensors",
713
+ "vision_model.vision_tower_low.vision_tower.blocks.20.attn.qkv.weight": "model-00001-of-00007.safetensors",
714
+ "vision_model.vision_tower_low.vision_tower.blocks.20.mlp.fc1.bias": "model-00001-of-00007.safetensors",
715
+ "vision_model.vision_tower_low.vision_tower.blocks.20.mlp.fc1.weight": "model-00001-of-00007.safetensors",
716
+ "vision_model.vision_tower_low.vision_tower.blocks.20.mlp.fc2.bias": "model-00001-of-00007.safetensors",
717
+ "vision_model.vision_tower_low.vision_tower.blocks.20.mlp.fc2.weight": "model-00001-of-00007.safetensors",
718
+ "vision_model.vision_tower_low.vision_tower.blocks.20.norm1.bias": "model-00001-of-00007.safetensors",
719
+ "vision_model.vision_tower_low.vision_tower.blocks.20.norm1.weight": "model-00001-of-00007.safetensors",
720
+ "vision_model.vision_tower_low.vision_tower.blocks.20.norm2.bias": "model-00001-of-00007.safetensors",
721
+ "vision_model.vision_tower_low.vision_tower.blocks.20.norm2.weight": "model-00001-of-00007.safetensors",
722
+ "vision_model.vision_tower_low.vision_tower.blocks.21.attn.proj.bias": "model-00001-of-00007.safetensors",
723
+ "vision_model.vision_tower_low.vision_tower.blocks.21.attn.proj.weight": "model-00001-of-00007.safetensors",
724
+ "vision_model.vision_tower_low.vision_tower.blocks.21.attn.qkv.bias": "model-00001-of-00007.safetensors",
725
+ "vision_model.vision_tower_low.vision_tower.blocks.21.attn.qkv.weight": "model-00001-of-00007.safetensors",
726
+ "vision_model.vision_tower_low.vision_tower.blocks.21.mlp.fc1.bias": "model-00001-of-00007.safetensors",
727
+ "vision_model.vision_tower_low.vision_tower.blocks.21.mlp.fc1.weight": "model-00001-of-00007.safetensors",
728
+ "vision_model.vision_tower_low.vision_tower.blocks.21.mlp.fc2.bias": "model-00001-of-00007.safetensors",
729
+ "vision_model.vision_tower_low.vision_tower.blocks.21.mlp.fc2.weight": "model-00001-of-00007.safetensors",
730
+ "vision_model.vision_tower_low.vision_tower.blocks.21.norm1.bias": "model-00001-of-00007.safetensors",
731
+ "vision_model.vision_tower_low.vision_tower.blocks.21.norm1.weight": "model-00001-of-00007.safetensors",
732
+ "vision_model.vision_tower_low.vision_tower.blocks.21.norm2.bias": "model-00001-of-00007.safetensors",
733
+ "vision_model.vision_tower_low.vision_tower.blocks.21.norm2.weight": "model-00001-of-00007.safetensors",
734
+ "vision_model.vision_tower_low.vision_tower.blocks.22.attn.proj.bias": "model-00001-of-00007.safetensors",
735
+ "vision_model.vision_tower_low.vision_tower.blocks.22.attn.proj.weight": "model-00001-of-00007.safetensors",
736
+ "vision_model.vision_tower_low.vision_tower.blocks.22.attn.qkv.bias": "model-00001-of-00007.safetensors",
737
+ "vision_model.vision_tower_low.vision_tower.blocks.22.attn.qkv.weight": "model-00001-of-00007.safetensors",
738
+ "vision_model.vision_tower_low.vision_tower.blocks.22.mlp.fc1.bias": "model-00001-of-00007.safetensors",
739
+ "vision_model.vision_tower_low.vision_tower.blocks.22.mlp.fc1.weight": "model-00001-of-00007.safetensors",
740
+ "vision_model.vision_tower_low.vision_tower.blocks.22.mlp.fc2.bias": "model-00001-of-00007.safetensors",
741
+ "vision_model.vision_tower_low.vision_tower.blocks.22.mlp.fc2.weight": "model-00001-of-00007.safetensors",
742
+ "vision_model.vision_tower_low.vision_tower.blocks.22.norm1.bias": "model-00001-of-00007.safetensors",
743
+ "vision_model.vision_tower_low.vision_tower.blocks.22.norm1.weight": "model-00001-of-00007.safetensors",
744
+ "vision_model.vision_tower_low.vision_tower.blocks.22.norm2.bias": "model-00001-of-00007.safetensors",
745
+ "vision_model.vision_tower_low.vision_tower.blocks.22.norm2.weight": "model-00001-of-00007.safetensors",
746
+ "vision_model.vision_tower_low.vision_tower.blocks.23.attn.proj.bias": "model-00001-of-00007.safetensors",
747
+ "vision_model.vision_tower_low.vision_tower.blocks.23.attn.proj.weight": "model-00001-of-00007.safetensors",
748
+ "vision_model.vision_tower_low.vision_tower.blocks.23.attn.qkv.bias": "model-00001-of-00007.safetensors",
749
+ "vision_model.vision_tower_low.vision_tower.blocks.23.attn.qkv.weight": "model-00001-of-00007.safetensors",
750
+ "vision_model.vision_tower_low.vision_tower.blocks.23.mlp.fc1.bias": "model-00001-of-00007.safetensors",
751
+ "vision_model.vision_tower_low.vision_tower.blocks.23.mlp.fc1.weight": "model-00001-of-00007.safetensors",
752
+ "vision_model.vision_tower_low.vision_tower.blocks.23.mlp.fc2.bias": "model-00001-of-00007.safetensors",
753
+ "vision_model.vision_tower_low.vision_tower.blocks.23.mlp.fc2.weight": "model-00001-of-00007.safetensors",
754
+ "vision_model.vision_tower_low.vision_tower.blocks.23.norm1.bias": "model-00001-of-00007.safetensors",
755
+ "vision_model.vision_tower_low.vision_tower.blocks.23.norm1.weight": "model-00001-of-00007.safetensors",
756
+ "vision_model.vision_tower_low.vision_tower.blocks.23.norm2.bias": "model-00001-of-00007.safetensors",
757
+ "vision_model.vision_tower_low.vision_tower.blocks.23.norm2.weight": "model-00001-of-00007.safetensors",
758
+ "vision_model.vision_tower_low.vision_tower.blocks.3.attn.proj.bias": "model-00001-of-00007.safetensors",
759
+ "vision_model.vision_tower_low.vision_tower.blocks.3.attn.proj.weight": "model-00001-of-00007.safetensors",
760
+ "vision_model.vision_tower_low.vision_tower.blocks.3.attn.qkv.bias": "model-00001-of-00007.safetensors",
761
+ "vision_model.vision_tower_low.vision_tower.blocks.3.attn.qkv.weight": "model-00001-of-00007.safetensors",
762
+ "vision_model.vision_tower_low.vision_tower.blocks.3.mlp.fc1.bias": "model-00001-of-00007.safetensors",
763
+ "vision_model.vision_tower_low.vision_tower.blocks.3.mlp.fc1.weight": "model-00001-of-00007.safetensors",
764
+ "vision_model.vision_tower_low.vision_tower.blocks.3.mlp.fc2.bias": "model-00001-of-00007.safetensors",
765
+ "vision_model.vision_tower_low.vision_tower.blocks.3.mlp.fc2.weight": "model-00001-of-00007.safetensors",
766
+ "vision_model.vision_tower_low.vision_tower.blocks.3.norm1.bias": "model-00001-of-00007.safetensors",
767
+ "vision_model.vision_tower_low.vision_tower.blocks.3.norm1.weight": "model-00001-of-00007.safetensors",
768
+ "vision_model.vision_tower_low.vision_tower.blocks.3.norm2.bias": "model-00001-of-00007.safetensors",
769
+ "vision_model.vision_tower_low.vision_tower.blocks.3.norm2.weight": "model-00001-of-00007.safetensors",
770
+ "vision_model.vision_tower_low.vision_tower.blocks.4.attn.proj.bias": "model-00001-of-00007.safetensors",
771
+ "vision_model.vision_tower_low.vision_tower.blocks.4.attn.proj.weight": "model-00001-of-00007.safetensors",
772
+ "vision_model.vision_tower_low.vision_tower.blocks.4.attn.qkv.bias": "model-00001-of-00007.safetensors",
773
+ "vision_model.vision_tower_low.vision_tower.blocks.4.attn.qkv.weight": "model-00001-of-00007.safetensors",
774
+ "vision_model.vision_tower_low.vision_tower.blocks.4.mlp.fc1.bias": "model-00001-of-00007.safetensors",
775
+ "vision_model.vision_tower_low.vision_tower.blocks.4.mlp.fc1.weight": "model-00001-of-00007.safetensors",
776
+ "vision_model.vision_tower_low.vision_tower.blocks.4.mlp.fc2.bias": "model-00001-of-00007.safetensors",
777
+ "vision_model.vision_tower_low.vision_tower.blocks.4.mlp.fc2.weight": "model-00001-of-00007.safetensors",
778
+ "vision_model.vision_tower_low.vision_tower.blocks.4.norm1.bias": "model-00001-of-00007.safetensors",
779
+ "vision_model.vision_tower_low.vision_tower.blocks.4.norm1.weight": "model-00001-of-00007.safetensors",
780
+ "vision_model.vision_tower_low.vision_tower.blocks.4.norm2.bias": "model-00001-of-00007.safetensors",
781
+ "vision_model.vision_tower_low.vision_tower.blocks.4.norm2.weight": "model-00001-of-00007.safetensors",
782
+ "vision_model.vision_tower_low.vision_tower.blocks.5.attn.proj.bias": "model-00001-of-00007.safetensors",
783
+ "vision_model.vision_tower_low.vision_tower.blocks.5.attn.proj.weight": "model-00001-of-00007.safetensors",
784
+ "vision_model.vision_tower_low.vision_tower.blocks.5.attn.qkv.bias": "model-00001-of-00007.safetensors",
785
+ "vision_model.vision_tower_low.vision_tower.blocks.5.attn.qkv.weight": "model-00001-of-00007.safetensors",
786
+ "vision_model.vision_tower_low.vision_tower.blocks.5.mlp.fc1.bias": "model-00001-of-00007.safetensors",
787
+ "vision_model.vision_tower_low.vision_tower.blocks.5.mlp.fc1.weight": "model-00001-of-00007.safetensors",
788
+ "vision_model.vision_tower_low.vision_tower.blocks.5.mlp.fc2.bias": "model-00001-of-00007.safetensors",
789
+ "vision_model.vision_tower_low.vision_tower.blocks.5.mlp.fc2.weight": "model-00001-of-00007.safetensors",
790
+ "vision_model.vision_tower_low.vision_tower.blocks.5.norm1.bias": "model-00001-of-00007.safetensors",
791
+ "vision_model.vision_tower_low.vision_tower.blocks.5.norm1.weight": "model-00001-of-00007.safetensors",
792
+ "vision_model.vision_tower_low.vision_tower.blocks.5.norm2.bias": "model-00001-of-00007.safetensors",
793
+ "vision_model.vision_tower_low.vision_tower.blocks.5.norm2.weight": "model-00001-of-00007.safetensors",
794
+ "vision_model.vision_tower_low.vision_tower.blocks.6.attn.proj.bias": "model-00001-of-00007.safetensors",
795
+ "vision_model.vision_tower_low.vision_tower.blocks.6.attn.proj.weight": "model-00001-of-00007.safetensors",
796
+ "vision_model.vision_tower_low.vision_tower.blocks.6.attn.qkv.bias": "model-00001-of-00007.safetensors",
797
+ "vision_model.vision_tower_low.vision_tower.blocks.6.attn.qkv.weight": "model-00001-of-00007.safetensors",
798
+ "vision_model.vision_tower_low.vision_tower.blocks.6.mlp.fc1.bias": "model-00001-of-00007.safetensors",
799
+ "vision_model.vision_tower_low.vision_tower.blocks.6.mlp.fc1.weight": "model-00001-of-00007.safetensors",
800
+ "vision_model.vision_tower_low.vision_tower.blocks.6.mlp.fc2.bias": "model-00001-of-00007.safetensors",
801
+ "vision_model.vision_tower_low.vision_tower.blocks.6.mlp.fc2.weight": "model-00001-of-00007.safetensors",
802
+ "vision_model.vision_tower_low.vision_tower.blocks.6.norm1.bias": "model-00001-of-00007.safetensors",
803
+ "vision_model.vision_tower_low.vision_tower.blocks.6.norm1.weight": "model-00001-of-00007.safetensors",
804
+ "vision_model.vision_tower_low.vision_tower.blocks.6.norm2.bias": "model-00001-of-00007.safetensors",
805
+ "vision_model.vision_tower_low.vision_tower.blocks.6.norm2.weight": "model-00001-of-00007.safetensors",
806
+ "vision_model.vision_tower_low.vision_tower.blocks.7.attn.proj.bias": "model-00001-of-00007.safetensors",
807
+ "vision_model.vision_tower_low.vision_tower.blocks.7.attn.proj.weight": "model-00001-of-00007.safetensors",
808
+ "vision_model.vision_tower_low.vision_tower.blocks.7.attn.qkv.bias": "model-00001-of-00007.safetensors",
809
+ "vision_model.vision_tower_low.vision_tower.blocks.7.attn.qkv.weight": "model-00001-of-00007.safetensors",
810
+ "vision_model.vision_tower_low.vision_tower.blocks.7.mlp.fc1.bias": "model-00001-of-00007.safetensors",
811
+ "vision_model.vision_tower_low.vision_tower.blocks.7.mlp.fc1.weight": "model-00001-of-00007.safetensors",
812
+ "vision_model.vision_tower_low.vision_tower.blocks.7.mlp.fc2.bias": "model-00001-of-00007.safetensors",
813
+ "vision_model.vision_tower_low.vision_tower.blocks.7.mlp.fc2.weight": "model-00001-of-00007.safetensors",
814
+ "vision_model.vision_tower_low.vision_tower.blocks.7.norm1.bias": "model-00001-of-00007.safetensors",
815
+ "vision_model.vision_tower_low.vision_tower.blocks.7.norm1.weight": "model-00001-of-00007.safetensors",
816
+ "vision_model.vision_tower_low.vision_tower.blocks.7.norm2.bias": "model-00001-of-00007.safetensors",
817
+ "vision_model.vision_tower_low.vision_tower.blocks.7.norm2.weight": "model-00001-of-00007.safetensors",
818
+ "vision_model.vision_tower_low.vision_tower.blocks.8.attn.proj.bias": "model-00001-of-00007.safetensors",
819
+ "vision_model.vision_tower_low.vision_tower.blocks.8.attn.proj.weight": "model-00001-of-00007.safetensors",
820
+ "vision_model.vision_tower_low.vision_tower.blocks.8.attn.qkv.bias": "model-00001-of-00007.safetensors",
821
+ "vision_model.vision_tower_low.vision_tower.blocks.8.attn.qkv.weight": "model-00001-of-00007.safetensors",
822
+ "vision_model.vision_tower_low.vision_tower.blocks.8.mlp.fc1.bias": "model-00001-of-00007.safetensors",
823
+ "vision_model.vision_tower_low.vision_tower.blocks.8.mlp.fc1.weight": "model-00001-of-00007.safetensors",
824
+ "vision_model.vision_tower_low.vision_tower.blocks.8.mlp.fc2.bias": "model-00001-of-00007.safetensors",
825
+ "vision_model.vision_tower_low.vision_tower.blocks.8.mlp.fc2.weight": "model-00001-of-00007.safetensors",
826
+ "vision_model.vision_tower_low.vision_tower.blocks.8.norm1.bias": "model-00001-of-00007.safetensors",
827
+ "vision_model.vision_tower_low.vision_tower.blocks.8.norm1.weight": "model-00001-of-00007.safetensors",
828
+ "vision_model.vision_tower_low.vision_tower.blocks.8.norm2.bias": "model-00001-of-00007.safetensors",
829
+ "vision_model.vision_tower_low.vision_tower.blocks.8.norm2.weight": "model-00001-of-00007.safetensors",
830
+ "vision_model.vision_tower_low.vision_tower.blocks.9.attn.proj.bias": "model-00001-of-00007.safetensors",
831
+ "vision_model.vision_tower_low.vision_tower.blocks.9.attn.proj.weight": "model-00001-of-00007.safetensors",
832
+ "vision_model.vision_tower_low.vision_tower.blocks.9.attn.qkv.bias": "model-00001-of-00007.safetensors",
833
+ "vision_model.vision_tower_low.vision_tower.blocks.9.attn.qkv.weight": "model-00001-of-00007.safetensors",
834
+ "vision_model.vision_tower_low.vision_tower.blocks.9.mlp.fc1.bias": "model-00001-of-00007.safetensors",
835
+ "vision_model.vision_tower_low.vision_tower.blocks.9.mlp.fc1.weight": "model-00001-of-00007.safetensors",
836
+ "vision_model.vision_tower_low.vision_tower.blocks.9.mlp.fc2.bias": "model-00001-of-00007.safetensors",
837
+ "vision_model.vision_tower_low.vision_tower.blocks.9.mlp.fc2.weight": "model-00001-of-00007.safetensors",
838
+ "vision_model.vision_tower_low.vision_tower.blocks.9.norm1.bias": "model-00001-of-00007.safetensors",
839
+ "vision_model.vision_tower_low.vision_tower.blocks.9.norm1.weight": "model-00001-of-00007.safetensors",
840
+ "vision_model.vision_tower_low.vision_tower.blocks.9.norm2.bias": "model-00001-of-00007.safetensors",
841
+ "vision_model.vision_tower_low.vision_tower.blocks.9.norm2.weight": "model-00001-of-00007.safetensors",
842
+ "vision_model.vision_tower_low.vision_tower.norm.bias": "model-00001-of-00007.safetensors",
843
+ "vision_model.vision_tower_low.vision_tower.norm.weight": "model-00001-of-00007.safetensors",
844
+ "vision_model.vision_tower_low.vision_tower.patch_embed.proj.bias": "model-00001-of-00007.safetensors",
845
+ "vision_model.vision_tower_low.vision_tower.patch_embed.proj.weight": "model-00001-of-00007.safetensors",
846
+ "vision_model.vision_tower_low.vision_tower.pos_embed": "model-00001-of-00007.safetensors"
847
+ }
848
+ }
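Note: the entries above are the tail of the weight_map in the sharded safetensors index (model.safetensors.index.json is the standard file name assumed here), which records which shard file stores each parameter. A minimal inspection sketch, assuming the index sits in the repository root:

import json
from collections import Counter

# Load the sharded-checkpoint index; "weight_map" maps parameter name -> shard file.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

shard_counts = Counter(index["weight_map"].values())
for shard, n_tensors in sorted(shard_counts.items()):
    print(f"{shard}: {n_tensors} tensors")

# All vision_model.* tensors listed above resolve to model-00001-of-00007.safetensors.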
preprocessor_config.json ADDED
@@ -0,0 +1,23 @@
1
+ {
2
+ "background_color": [
3
+ 122,
4
+ 116,
5
+ 104
6
+ ],
7
+ "do_normalize": false,
8
+ "image_mean": [
9
+ 0.48145466,
10
+ 0.4578275,
11
+ 0.40821073
12
+ ],
13
+ "image_processor_type": "VLMImageProcessor",
14
+ "image_size": 1024,
15
+ "image_std": [
16
+ 0.26862954,
17
+ 0.26130258,
18
+ 0.27577711
19
+ ],
20
+ "min_size": 14,
21
+ "processor_class": "Qwen2vlmProcessor",
22
+ "rescale_factor": 0.00392156862745098
23
+ }
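Note: VLMImageProcessor is a custom image processor class, so the sketch below only illustrates the arithmetic implied by the fields above (multiply by rescale_factor = 1/255, then optionally normalize with image_mean/image_std; do_normalize is false in this config), not the actual implementation.

import numpy as np

IMAGE_MEAN = np.array([0.48145466, 0.4578275, 0.40821073])
IMAGE_STD = np.array([0.26862954, 0.26130258, 0.27577711])
RESCALE_FACTOR = 0.00392156862745098  # 1 / 255

def preprocess(pixels_uint8: np.ndarray, do_normalize: bool = False) -> np.ndarray:
    # pixels_uint8: H x W x 3 array with values in [0, 255].
    x = pixels_uint8.astype(np.float32) * RESCALE_FACTOR  # rescale to [0, 1]
    if do_normalize:
        x = (x - IMAGE_MEAN) / IMAGE_STD  # channel-wise normalization
    return x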
special_tokens_map.json ADDED
@@ -0,0 +1,32 @@
1
+ {
2
+ "additional_special_tokens": [
3
+ "<|im_start|>",
4
+ "<|im_end|>",
5
+ "<|object_ref_start|>",
6
+ "<|object_ref_end|>",
7
+ "<|box_start|>",
8
+ "<|box_end|>",
9
+ "<|quad_start|>",
10
+ "<|quad_end|>",
11
+ "<|vision_start|>",
12
+ "<|vision_end|>",
13
+ "<|vision_pad|>",
14
+ "<|image_pad|>",
15
+ "<|video_pad|>",
16
+ "<|image|>"
17
+ ],
18
+ "eos_token": {
19
+ "content": "<|im_end|>",
20
+ "lstrip": false,
21
+ "normalized": false,
22
+ "rstrip": false,
23
+ "single_word": false
24
+ },
25
+ "pad_token": {
26
+ "content": "<|endoftext|>",
27
+ "lstrip": false,
28
+ "normalized": false,
29
+ "rstrip": false,
30
+ "single_word": false
31
+ }
32
+ }
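Note: once the files in this commit are loaded through transformers, the token assignments above can be confirmed directly (MODEL_PATH below is a placeholder for this repository):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("MODEL_PATH")  # placeholder path
print(tokenizer.eos_token)                  # expected: <|im_end|>
print(tokenizer.pad_token)                  # expected: <|endoftext|>
print(tokenizer.additional_special_tokens)  # includes <|image|>, <|vision_start|>, ...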
tokenizer.json ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:020dd2a3fab5170fa5e566be16a6792841f56bdb90660b8a2e2145ac92327e58
3
+ size 11422082
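Note: the three lines above are a Git LFS pointer, not the tokenizer payload itself. After fetching the real file (for example with git lfs pull), it can be checked against the pointer's oid and size; a small sketch:

import hashlib
import os

path = "tokenizer.json"
expected_oid = "020dd2a3fab5170fa5e566be16a6792841f56bdb90660b8a2e2145ac92327e58"
expected_size = 11422082

assert os.path.getsize(path) == expected_size
with open(path, "rb") as f:
    assert hashlib.sha256(f.read()).hexdigest() == expected_oid
print("tokenizer.json matches its LFS pointer")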
tokenizer_config.json ADDED
@@ -0,0 +1,217 @@
1
+ {
2
+ "add_bos_token": false,
3
+ "add_prefix_space": false,
4
+ "added_tokens_decoder": {
5
+ "151643": {
6
+ "content": "<|endoftext|>",
7
+ "lstrip": false,
8
+ "normalized": false,
9
+ "rstrip": false,
10
+ "single_word": false,
11
+ "special": true
12
+ },
13
+ "151644": {
14
+ "content": "<|im_start|>",
15
+ "lstrip": false,
16
+ "normalized": false,
17
+ "rstrip": false,
18
+ "single_word": false,
19
+ "special": true
20
+ },
21
+ "151645": {
22
+ "content": "<|im_end|>",
23
+ "lstrip": false,
24
+ "normalized": false,
25
+ "rstrip": false,
26
+ "single_word": false,
27
+ "special": true
28
+ },
29
+ "151646": {
30
+ "content": "<|object_ref_start|>",
31
+ "lstrip": false,
32
+ "normalized": false,
33
+ "rstrip": false,
34
+ "single_word": false,
35
+ "special": true
36
+ },
37
+ "151647": {
38
+ "content": "<|object_ref_end|>",
39
+ "lstrip": false,
40
+ "normalized": false,
41
+ "rstrip": false,
42
+ "single_word": false,
43
+ "special": true
44
+ },
45
+ "151648": {
46
+ "content": "<|box_start|>",
47
+ "lstrip": false,
48
+ "normalized": false,
49
+ "rstrip": false,
50
+ "single_word": false,
51
+ "special": true
52
+ },
53
+ "151649": {
54
+ "content": "<|box_end|>",
55
+ "lstrip": false,
56
+ "normalized": false,
57
+ "rstrip": false,
58
+ "single_word": false,
59
+ "special": true
60
+ },
61
+ "151650": {
62
+ "content": "<|quad_start|>",
63
+ "lstrip": false,
64
+ "normalized": false,
65
+ "rstrip": false,
66
+ "single_word": false,
67
+ "special": true
68
+ },
69
+ "151651": {
70
+ "content": "<|quad_end|>",
71
+ "lstrip": false,
72
+ "normalized": false,
73
+ "rstrip": false,
74
+ "single_word": false,
75
+ "special": true
76
+ },
77
+ "151652": {
78
+ "content": "<|vision_start|>",
79
+ "lstrip": false,
80
+ "normalized": false,
81
+ "rstrip": false,
82
+ "single_word": false,
83
+ "special": true
84
+ },
85
+ "151653": {
86
+ "content": "<|vision_end|>",
87
+ "lstrip": false,
88
+ "normalized": false,
89
+ "rstrip": false,
90
+ "single_word": false,
91
+ "special": true
92
+ },
93
+ "151654": {
94
+ "content": "<|vision_pad|>",
95
+ "lstrip": false,
96
+ "normalized": false,
97
+ "rstrip": false,
98
+ "single_word": false,
99
+ "special": true
100
+ },
101
+ "151655": {
102
+ "content": "<|image_pad|>",
103
+ "lstrip": false,
104
+ "normalized": false,
105
+ "rstrip": false,
106
+ "single_word": false,
107
+ "special": true
108
+ },
109
+ "151656": {
110
+ "content": "<|video_pad|>",
111
+ "lstrip": false,
112
+ "normalized": false,
113
+ "rstrip": false,
114
+ "single_word": false,
115
+ "special": true
116
+ },
117
+ "151657": {
118
+ "content": "<tool_call>",
119
+ "lstrip": false,
120
+ "normalized": false,
121
+ "rstrip": false,
122
+ "single_word": false,
123
+ "special": false
124
+ },
125
+ "151658": {
126
+ "content": "</tool_call>",
127
+ "lstrip": false,
128
+ "normalized": false,
129
+ "rstrip": false,
130
+ "single_word": false,
131
+ "special": false
132
+ },
133
+ "151659": {
134
+ "content": "<|fim_prefix|>",
135
+ "lstrip": false,
136
+ "normalized": false,
137
+ "rstrip": false,
138
+ "single_word": false,
139
+ "special": false
140
+ },
141
+ "151660": {
142
+ "content": "<|fim_middle|>",
143
+ "lstrip": false,
144
+ "normalized": false,
145
+ "rstrip": false,
146
+ "single_word": false,
147
+ "special": false
148
+ },
149
+ "151661": {
150
+ "content": "<|fim_suffix|>",
151
+ "lstrip": false,
152
+ "normalized": false,
153
+ "rstrip": false,
154
+ "single_word": false,
155
+ "special": false
156
+ },
157
+ "151662": {
158
+ "content": "<|fim_pad|>",
159
+ "lstrip": false,
160
+ "normalized": false,
161
+ "rstrip": false,
162
+ "single_word": false,
163
+ "special": false
164
+ },
165
+ "151663": {
166
+ "content": "<|repo_name|>",
167
+ "lstrip": false,
168
+ "normalized": false,
169
+ "rstrip": false,
170
+ "single_word": false,
171
+ "special": false
172
+ },
173
+ "151664": {
174
+ "content": "<|file_sep|>",
175
+ "lstrip": false,
176
+ "normalized": false,
177
+ "rstrip": false,
178
+ "single_word": false,
179
+ "special": false
180
+ },
181
+ "151665": {
182
+ "content": "<|image|>",
183
+ "lstrip": false,
184
+ "normalized": false,
185
+ "rstrip": false,
186
+ "single_word": false,
187
+ "special": true
188
+ }
189
+ },
190
+ "additional_special_tokens": [
191
+ "<|im_start|>",
192
+ "<|im_end|>",
193
+ "<|object_ref_start|>",
194
+ "<|object_ref_end|>",
195
+ "<|box_start|>",
196
+ "<|box_end|>",
197
+ "<|quad_start|>",
198
+ "<|quad_end|>",
199
+ "<|vision_start|>",
200
+ "<|vision_end|>",
201
+ "<|vision_pad|>",
202
+ "<|image_pad|>",
203
+ "<|video_pad|>",
204
+ "<|image|>"
205
+ ],
206
+ "bos_token": null,
207
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'Please reason step by step, and put your final answer within \\\\boxed{}.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nPlease reason step by step, and put your final answer within \\\\boxed{}.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
208
+ "clean_up_tokenization_spaces": false,
209
+ "eos_token": "<|im_end|>",
210
+ "errors": "replace",
211
+ "model_max_length": 131072,
212
+ "pad_token": "<|endoftext|>",
213
+ "processor_class": "Qwen2vlmProcessor",
214
+ "split_special_tokens": false,
215
+ "tokenizer_class": "Qwen2Tokenizer",
216
+ "unk_token": null
217
+ }
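Note: a short sketch of how the chat_template above is typically exercised through transformers (MODEL_PATH is a placeholder for this repository; the expected output follows from the template string itself):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("MODEL_PATH")  # placeholder path
messages = [{"role": "user", "content": "What is 2 + 2?"}]
prompt = tokenizer.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# With no system message, the template injects the default system prompt
# "Please reason step by step, and put your final answer within \boxed{}.",
# wraps each turn in <|im_start|>role ... <|im_end|>, and ends with "<|im_start|>assistant\n".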
trainer_state.json ADDED
@@ -0,0 +1,546 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 2.9238578680203045,
5
+ "eval_steps": 500,
6
+ "global_step": 72,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.04060913705583756,
13
+ "grad_norm": 2.231037139892578,
14
+ "learning_rate": 1e-05,
15
+ "loss": 0.6587,
16
+ "step": 1
17
+ },
18
+ {
19
+ "epoch": 0.08121827411167512,
20
+ "grad_norm": 2.3449997901916504,
21
+ "learning_rate": 2e-05,
22
+ "loss": 0.7422,
23
+ "step": 2
24
+ },
25
+ {
26
+ "epoch": 0.1218274111675127,
27
+ "grad_norm": 1.36476731300354,
28
+ "learning_rate": 1.9989930665413148e-05,
29
+ "loss": 0.5462,
30
+ "step": 3
31
+ },
32
+ {
33
+ "epoch": 0.16243654822335024,
34
+ "grad_norm": 1.2183294296264648,
35
+ "learning_rate": 1.9959742939952393e-05,
36
+ "loss": 0.52,
37
+ "step": 4
38
+ },
39
+ {
40
+ "epoch": 0.20304568527918782,
41
+ "grad_norm": 0.9061247706413269,
42
+ "learning_rate": 1.990949761767935e-05,
43
+ "loss": 0.5118,
44
+ "step": 5
45
+ },
46
+ {
47
+ "epoch": 0.2436548223350254,
48
+ "grad_norm": 0.8054011464118958,
49
+ "learning_rate": 1.98392958859863e-05,
50
+ "loss": 0.4539,
51
+ "step": 6
52
+ },
53
+ {
54
+ "epoch": 0.28426395939086296,
55
+ "grad_norm": 0.7858198881149292,
56
+ "learning_rate": 1.9749279121818235e-05,
57
+ "loss": 0.4697,
58
+ "step": 7
59
+ },
60
+ {
61
+ "epoch": 0.3248730964467005,
62
+ "grad_norm": 0.7081526517868042,
63
+ "learning_rate": 1.9639628606958535e-05,
64
+ "loss": 0.4,
65
+ "step": 8
66
+ },
67
+ {
68
+ "epoch": 0.36548223350253806,
69
+ "grad_norm": 0.7763380408287048,
70
+ "learning_rate": 1.9510565162951538e-05,
71
+ "loss": 0.4542,
72
+ "step": 9
73
+ },
74
+ {
75
+ "epoch": 0.40609137055837563,
76
+ "grad_norm": 0.6853049993515015,
77
+ "learning_rate": 1.9362348706397374e-05,
78
+ "loss": 0.4059,
79
+ "step": 10
80
+ },
81
+ {
82
+ "epoch": 0.4467005076142132,
83
+ "grad_norm": 0.7308188676834106,
84
+ "learning_rate": 1.919527772551451e-05,
85
+ "loss": 0.4162,
86
+ "step": 11
87
+ },
88
+ {
89
+ "epoch": 0.4873096446700508,
90
+ "grad_norm": 0.7724586129188538,
91
+ "learning_rate": 1.900968867902419e-05,
92
+ "loss": 0.4469,
93
+ "step": 12
94
+ },
95
+ {
96
+ "epoch": 0.5279187817258884,
97
+ "grad_norm": 0.6326901316642761,
98
+ "learning_rate": 1.880595531856738e-05,
99
+ "loss": 0.422,
100
+ "step": 13
101
+ },
102
+ {
103
+ "epoch": 0.5685279187817259,
104
+ "grad_norm": 0.6398388147354126,
105
+ "learning_rate": 1.8584487936018663e-05,
106
+ "loss": 0.4343,
107
+ "step": 14
108
+ },
109
+ {
110
+ "epoch": 0.6091370558375635,
111
+ "grad_norm": 0.5947191715240479,
112
+ "learning_rate": 1.834573253721303e-05,
113
+ "loss": 0.3745,
114
+ "step": 15
115
+ },
116
+ {
117
+ "epoch": 0.649746192893401,
118
+ "grad_norm": 0.6309987306594849,
119
+ "learning_rate": 1.8090169943749477e-05,
120
+ "loss": 0.3943,
121
+ "step": 16
122
+ },
123
+ {
124
+ "epoch": 0.6903553299492385,
125
+ "grad_norm": 0.6212602853775024,
126
+ "learning_rate": 1.78183148246803e-05,
127
+ "loss": 0.3954,
128
+ "step": 17
129
+ },
130
+ {
131
+ "epoch": 0.7309644670050761,
132
+ "grad_norm": 0.6566072702407837,
133
+ "learning_rate": 1.7530714660036112e-05,
134
+ "loss": 0.3788,
135
+ "step": 18
136
+ },
137
+ {
138
+ "epoch": 0.7715736040609137,
139
+ "grad_norm": 0.6458687782287598,
140
+ "learning_rate": 1.7227948638273918e-05,
141
+ "loss": 0.3884,
142
+ "step": 19
143
+ },
144
+ {
145
+ "epoch": 0.8121827411167513,
146
+ "grad_norm": 0.6482535004615784,
147
+ "learning_rate": 1.691062648986865e-05,
148
+ "loss": 0.4218,
149
+ "step": 20
150
+ },
151
+ {
152
+ "epoch": 0.8527918781725888,
153
+ "grad_norm": 0.6024514436721802,
154
+ "learning_rate": 1.657938725939713e-05,
155
+ "loss": 0.4183,
156
+ "step": 21
157
+ },
158
+ {
159
+ "epoch": 0.8934010152284264,
160
+ "grad_norm": 0.6119846105575562,
161
+ "learning_rate": 1.6234898018587336e-05,
162
+ "loss": 0.3988,
163
+ "step": 22
164
+ },
165
+ {
166
+ "epoch": 0.934010152284264,
167
+ "grad_norm": 0.5902426838874817,
168
+ "learning_rate": 1.5877852522924733e-05,
169
+ "loss": 0.3743,
170
+ "step": 23
171
+ },
172
+ {
173
+ "epoch": 0.9746192893401016,
174
+ "grad_norm": 0.5760794878005981,
175
+ "learning_rate": 1.5508969814521026e-05,
176
+ "loss": 0.385,
177
+ "step": 24
178
+ },
179
+ {
180
+ "epoch": 1.015228426395939,
181
+ "grad_norm": 0.5974088907241821,
182
+ "learning_rate": 1.5128992774059063e-05,
183
+ "loss": 0.3249,
184
+ "step": 25
185
+ },
186
+ {
187
+ "epoch": 1.0558375634517767,
188
+ "grad_norm": 0.44053706526756287,
189
+ "learning_rate": 1.4738686624729987e-05,
190
+ "loss": 0.2409,
191
+ "step": 26
192
+ },
193
+ {
194
+ "epoch": 1.0964467005076142,
195
+ "grad_norm": 0.46850693225860596,
196
+ "learning_rate": 1.4338837391175582e-05,
197
+ "loss": 0.2628,
198
+ "step": 27
199
+ },
200
+ {
201
+ "epoch": 1.1370558375634519,
202
+ "grad_norm": 0.47877609729766846,
203
+ "learning_rate": 1.3930250316539237e-05,
204
+ "loss": 0.248,
205
+ "step": 28
206
+ },
207
+ {
208
+ "epoch": 1.1776649746192893,
209
+ "grad_norm": 0.503269374370575,
210
+ "learning_rate": 1.3513748240813429e-05,
211
+ "loss": 0.2306,
212
+ "step": 29
213
+ },
214
+ {
215
+ "epoch": 1.218274111675127,
216
+ "grad_norm": 0.4539880156517029,
217
+ "learning_rate": 1.3090169943749475e-05,
218
+ "loss": 0.2199,
219
+ "step": 30
220
+ },
221
+ {
222
+ "epoch": 1.2588832487309645,
223
+ "grad_norm": 0.6157008409500122,
224
+ "learning_rate": 1.2660368455666752e-05,
225
+ "loss": 0.2287,
226
+ "step": 31
227
+ },
228
+ {
229
+ "epoch": 1.299492385786802,
230
+ "grad_norm": 0.4458495080471039,
231
+ "learning_rate": 1.2225209339563144e-05,
232
+ "loss": 0.2238,
233
+ "step": 32
234
+ },
235
+ {
236
+ "epoch": 1.3401015228426396,
237
+ "grad_norm": 0.44391369819641113,
238
+ "learning_rate": 1.1785568947986368e-05,
239
+ "loss": 0.215,
240
+ "step": 33
241
+ },
242
+ {
243
+ "epoch": 1.380710659898477,
244
+ "grad_norm": 0.46967068314552307,
245
+ "learning_rate": 1.1342332658176556e-05,
246
+ "loss": 0.2079,
247
+ "step": 34
248
+ },
249
+ {
250
+ "epoch": 1.4213197969543148,
251
+ "grad_norm": 0.44738730788230896,
252
+ "learning_rate": 1.0896393089034336e-05,
253
+ "loss": 0.2095,
254
+ "step": 35
255
+ },
256
+ {
257
+ "epoch": 1.4619289340101522,
258
+ "grad_norm": 0.438690721988678,
259
+ "learning_rate": 1.044864830350515e-05,
260
+ "loss": 0.2081,
261
+ "step": 36
262
+ },
263
+ {
264
+ "epoch": 1.50253807106599,
265
+ "grad_norm": 0.43093299865722656,
266
+ "learning_rate": 1e-05,
267
+ "loss": 0.1841,
268
+ "step": 37
269
+ },
270
+ {
271
+ "epoch": 1.5431472081218274,
272
+ "grad_norm": 0.47970595955848694,
273
+ "learning_rate": 9.551351696494854e-06,
274
+ "loss": 0.2152,
275
+ "step": 38
276
+ },
277
+ {
278
+ "epoch": 1.5837563451776648,
279
+ "grad_norm": 0.4089057147502899,
280
+ "learning_rate": 9.103606910965666e-06,
281
+ "loss": 0.1878,
282
+ "step": 39
283
+ },
284
+ {
285
+ "epoch": 1.6243654822335025,
286
+ "grad_norm": 0.4573724567890167,
287
+ "learning_rate": 8.657667341823449e-06,
288
+ "loss": 0.1955,
289
+ "step": 40
290
+ },
291
+ {
292
+ "epoch": 1.6649746192893402,
293
+ "grad_norm": 0.46062809228897095,
294
+ "learning_rate": 8.214431052013636e-06,
295
+ "loss": 0.1898,
296
+ "step": 41
297
+ },
298
+ {
299
+ "epoch": 1.7055837563451777,
300
+ "grad_norm": 0.47705650329589844,
301
+ "learning_rate": 7.774790660436857e-06,
302
+ "loss": 0.2261,
303
+ "step": 42
304
+ },
305
+ {
306
+ "epoch": 1.7461928934010151,
307
+ "grad_norm": 0.4647163450717926,
308
+ "learning_rate": 7.33963154433325e-06,
309
+ "loss": 0.1794,
310
+ "step": 43
311
+ },
312
+ {
313
+ "epoch": 1.7868020304568528,
314
+ "grad_norm": 0.47383496165275574,
315
+ "learning_rate": 6.909830056250527e-06,
316
+ "loss": 0.1966,
317
+ "step": 44
318
+ },
319
+ {
320
+ "epoch": 1.8274111675126905,
321
+ "grad_norm": 0.4777352511882782,
322
+ "learning_rate": 6.486251759186573e-06,
323
+ "loss": 0.1909,
324
+ "step": 45
325
+ },
326
+ {
327
+ "epoch": 1.868020304568528,
328
+ "grad_norm": 0.5107164978981018,
329
+ "learning_rate": 6.069749683460765e-06,
330
+ "loss": 0.2004,
331
+ "step": 46
332
+ },
333
+ {
334
+ "epoch": 1.9086294416243654,
335
+ "grad_norm": 1.4848881959915161,
336
+ "learning_rate": 5.66116260882442e-06,
337
+ "loss": 0.1932,
338
+ "step": 47
339
+ },
340
+ {
341
+ "epoch": 1.9492385786802031,
342
+ "grad_norm": 0.43872544169425964,
343
+ "learning_rate": 5.2613133752700145e-06,
344
+ "loss": 0.1874,
345
+ "step": 48
346
+ },
347
+ {
348
+ "epoch": 1.9898477157360406,
349
+ "grad_norm": 0.5519695281982422,
350
+ "learning_rate": 4.87100722594094e-06,
351
+ "loss": 0.1995,
352
+ "step": 49
353
+ },
354
+ {
355
+ "epoch": 2.030456852791878,
356
+ "grad_norm": 0.4047316610813141,
357
+ "learning_rate": 4.491030185478976e-06,
358
+ "loss": 0.1541,
359
+ "step": 50
360
+ },
361
+ {
362
+ "epoch": 2.0710659898477157,
363
+ "grad_norm": 0.39870932698249817,
364
+ "learning_rate": 4.12214747707527e-06,
365
+ "loss": 0.1425,
366
+ "step": 51
367
+ },
368
+ {
369
+ "epoch": 2.1116751269035534,
370
+ "grad_norm": 0.3946266174316406,
371
+ "learning_rate": 3.7651019814126656e-06,
372
+ "loss": 0.1455,
373
+ "step": 52
374
+ },
375
+ {
376
+ "epoch": 2.152284263959391,
377
+ "grad_norm": 0.37286555767059326,
378
+ "learning_rate": 3.4206127406028744e-06,
379
+ "loss": 0.127,
380
+ "step": 53
381
+ },
382
+ {
383
+ "epoch": 2.1928934010152283,
384
+ "grad_norm": 0.3995823264122009,
385
+ "learning_rate": 3.089373510131354e-06,
386
+ "loss": 0.1484,
387
+ "step": 54
388
+ },
389
+ {
390
+ "epoch": 2.233502538071066,
391
+ "grad_norm": 0.3770010471343994,
392
+ "learning_rate": 2.7720513617260857e-06,
393
+ "loss": 0.1344,
394
+ "step": 55
395
+ },
396
+ {
397
+ "epoch": 2.2741116751269037,
398
+ "grad_norm": 0.36149802803993225,
399
+ "learning_rate": 2.469285339963892e-06,
400
+ "loss": 0.1243,
401
+ "step": 56
402
+ },
403
+ {
404
+ "epoch": 2.314720812182741,
405
+ "grad_norm": 0.36556974053382874,
406
+ "learning_rate": 2.1816851753197023e-06,
407
+ "loss": 0.1271,
408
+ "step": 57
409
+ },
410
+ {
411
+ "epoch": 2.3553299492385786,
412
+ "grad_norm": 0.3600409924983978,
413
+ "learning_rate": 1.9098300562505266e-06,
414
+ "loss": 0.1233,
415
+ "step": 58
416
+ },
417
+ {
418
+ "epoch": 2.3959390862944163,
419
+ "grad_norm": 0.3835366368293762,
420
+ "learning_rate": 1.6542674627869738e-06,
421
+ "loss": 0.1194,
422
+ "step": 59
423
+ },
424
+ {
425
+ "epoch": 2.436548223350254,
426
+ "grad_norm": 0.38183608651161194,
427
+ "learning_rate": 1.4155120639813392e-06,
428
+ "loss": 0.1407,
429
+ "step": 60
430
+ },
431
+ {
432
+ "epoch": 2.4771573604060912,
433
+ "grad_norm": 0.3950784206390381,
434
+ "learning_rate": 1.19404468143262e-06,
435
+ "loss": 0.1265,
436
+ "step": 61
437
+ },
438
+ {
439
+ "epoch": 2.517766497461929,
440
+ "grad_norm": 0.47062575817108154,
441
+ "learning_rate": 9.903113209758098e-07,
442
+ "loss": 0.1124,
443
+ "step": 62
444
+ },
445
+ {
446
+ "epoch": 2.5583756345177666,
447
+ "grad_norm": 0.40783652663230896,
448
+ "learning_rate": 8.047222744854943e-07,
449
+ "loss": 0.1381,
450
+ "step": 63
451
+ },
452
+ {
453
+ "epoch": 2.598984771573604,
454
+ "grad_norm": 0.45733270049095154,
455
+ "learning_rate": 6.37651293602628e-07,
456
+ "loss": 0.1211,
457
+ "step": 64
458
+ },
459
+ {
460
+ "epoch": 2.6395939086294415,
461
+ "grad_norm": 0.3771652579307556,
462
+ "learning_rate": 4.894348370484648e-07,
463
+ "loss": 0.1161,
464
+ "step": 65
465
+ },
466
+ {
467
+ "epoch": 2.6802030456852792,
468
+ "grad_norm": 0.38524389266967773,
469
+ "learning_rate": 3.603713930414676e-07,
470
+ "loss": 0.1191,
471
+ "step": 66
472
+ },
473
+ {
474
+ "epoch": 2.720812182741117,
475
+ "grad_norm": 0.3731960356235504,
476
+ "learning_rate": 2.507208781817638e-07,
477
+ "loss": 0.1071,
478
+ "step": 67
479
+ },
480
+ {
481
+ "epoch": 2.761421319796954,
482
+ "grad_norm": 0.4229435920715332,
483
+ "learning_rate": 1.6070411401370335e-07,
484
+ "loss": 0.1267,
485
+ "step": 68
486
+ },
487
+ {
488
+ "epoch": 2.802030456852792,
489
+ "grad_norm": 0.3722592294216156,
490
+ "learning_rate": 9.0502382320653e-08,
491
+ "loss": 0.1235,
492
+ "step": 69
493
+ },
494
+ {
495
+ "epoch": 2.8426395939086295,
496
+ "grad_norm": 0.4112900495529175,
497
+ "learning_rate": 4.025706004760932e-08,
498
+ "loss": 0.1173,
499
+ "step": 70
500
+ },
501
+ {
502
+ "epoch": 2.8832487309644668,
503
+ "grad_norm": 0.35562577843666077,
504
+ "learning_rate": 1.0069334586854106e-08,
505
+ "loss": 0.1143,
506
+ "step": 71
507
+ },
508
+ {
509
+ "epoch": 2.9238578680203045,
510
+ "grad_norm": 0.427130788564682,
511
+ "learning_rate": 0.0,
512
+ "loss": 0.1234,
513
+ "step": 72
514
+ },
515
+ {
516
+ "epoch": 2.9238578680203045,
517
+ "step": 72,
518
+ "total_flos": 1.856511114477568e+16,
519
+ "train_loss": 0.2654210272141629,
520
+ "train_runtime": 1195.1529,
521
+ "train_samples_per_second": 7.902,
522
+ "train_steps_per_second": 0.06
523
+ }
524
+ ],
525
+ "logging_steps": 1.0,
526
+ "max_steps": 72,
527
+ "num_input_tokens_seen": 0,
528
+ "num_train_epochs": 3,
529
+ "save_steps": 26,
530
+ "stateful_callbacks": {
531
+ "TrainerControl": {
532
+ "args": {
533
+ "should_epoch_stop": false,
534
+ "should_evaluate": false,
535
+ "should_log": false,
536
+ "should_save": true,
537
+ "should_training_stop": true
538
+ },
539
+ "attributes": {}
540
+ }
541
+ },
542
+ "total_flos": 1.856511114477568e+16,
543
+ "train_batch_size": 1,
544
+ "trial_name": null,
545
+ "trial_params": null
546
+ }
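Note: the trainer_state.json above records a 72-step run (3 epochs, logged every step) whose learning rate warms up to 2e-05 and then decays along a cosine curve to 0. A minimal sketch for inspecting it offline, assuming the file has been downloaded locally as trainer_state.json (the path, and the 2 warm-up steps inferred from the log, are assumptions, not values taken from a config in this diff):

import json
import math

# Load the trainer state written by transformers.Trainer (local path is an assumption).
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the per-step entries; the final summary entry has no "loss"/"learning_rate" keys.
logged = [e for e in state["log_history"] if "loss" in e and "learning_rate" in e]
steps = [e["step"] for e in logged]
losses = [e["loss"] for e in logged]

print(f"logged steps: {len(steps)}, first loss: {losses[0]:.4f}, last loss: {losses[-1]:.4f}")
print(f"mean logged loss: {sum(losses) / len(losses):.4f}")  # should sit close to the reported train_loss

# Sanity-check the learning-rate column against a linear-warmup + cosine schedule
# (peak 2e-05, 2 warm-up steps, 72 total steps -- all read off the log itself).
def expected_lr(step, peak=2e-05, warmup=2, total=72):
    if step <= warmup:
        return peak * step / warmup
    progress = (step - warmup) / (total - warmup)
    return 0.5 * peak * (1 + math.cos(math.pi * progress))

for e in logged[:3]:
    print(e["step"], e["learning_rate"], round(expected_lr(e["step"]), 12))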
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:692c26c0bf7c477b8d066681651bd6011db5ae8b5c6c48a74107b8e67ced4c0c
3
+ size 5752
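The three lines above are a Git LFS pointer, not the file itself; the 5752-byte payload behind it is the TrainingArguments object that transformers.Trainer serializes with torch.save. A hedged sketch of inspecting it, assuming the repo has been cloned, `git lfs pull` has been run, and transformers is importable in the loading environment:

# After `git lfs pull`, the pointer is replaced by the real training_args.bin.
import torch

# TrainingArguments is pickled, so newer torch versions need weights_only=False.
args = torch.load("training_args.bin", weights_only=False)
print(type(args).__name__)
print(args.num_train_epochs, args.learning_rate, args.per_device_train_batch_size)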
vocab.json ADDED
The diff for this file is too large to render. See raw diff
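vocab.json is one of the tokenizer files AutoTokenizer reads when loading this repo, which is why its diff is too large to render inline. A minimal sketch of loading the tokenizer, with a placeholder repo id since the actual id is not shown in this diff:

from transformers import AutoTokenizer

# "your-namespace/this-model" is a placeholder; substitute the real repo id.
tok = AutoTokenizer.from_pretrained("your-namespace/this-model")
print(len(tok))                 # base vocabulary plus the added special tokens
print(tok.encode("hello world"))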