ColPali · Safetensors · English · qwen2_vl
manu committed (verified)
Commit e9110fa · 1 Parent(s): 1ad9c96

Update config.json

Files changed (1):
  1. config.json +40 -211
config.json CHANGED
@@ -1,219 +1,48 @@
 {
+  "_name_or_path": "/lus/home/CT10/cad15443/mfaysse/colpali/models/Qwen2-VL-2B-Instruct",
   "architectures": [
     "ColQwen2"
   ],
-  "embedding_dim": 128,
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "hidden_act": "silu",
+  "hidden_size": 1536,
+  "image_token_id": 151655,
   "initializer_range": 0.02,
-  "is_composition": false,
-  "model_type": "colqwen2",
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.53.0.dev0",
-  "vlm_config": {
-    "_name_or_path": "Qwen/Qwen2-VL-2B-Instruct",
-    "attention_dropout": 0.0,
-    "bos_token_id": 151643,
-    "eos_token_id": 151645,
-    "hidden_act": "silu",
+  "intermediate_size": 8960,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "model_type": "qwen2_vl",
+  "num_attention_heads": 12,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 2,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": {
+    "mrope_section": [
+      16,
+      24,
+      24
+    ],
+    "rope_type": "default",
+    "type": "default"
+  },
+  "rope_theta": 1000000.0,
+  "sliding_window": 32768,
+  "tie_word_embeddings": true,
+  "torch_dtype": "float32",
+  "transformers_version": "4.45.0.dev0",
+  "use_cache": true,
+  "use_sliding_window": false,
+  "video_token_id": 151656,
+  "vision_config": {
     "hidden_size": 1536,
-    "image_token_id": 151655,
-    "initializer_range": 0.02,
-    "intermediate_size": 8960,
-    "max_position_embeddings": 32768,
-    "max_window_layers": 28,
+    "in_chans": 3,
     "model_type": "qwen2_vl",
-    "num_attention_heads": 12,
-    "num_hidden_layers": 28,
-    "num_key_value_heads": 2,
-    "rms_norm_eps": 1e-06,
-    "rope_scaling": {
-      "mrope_section": [
-        16,
-        24,
-        24
-      ],
-      "rope_type": "default",
-      "type": "default"
-    },
-    "rope_theta": 1000000.0,
-    "sliding_window": 32768,
-    "text_config": {
-      "_name_or_path": "vidore/colqwen2-base",
-      "add_cross_attention": false,
-      "architectures": [
-        "ColQwen2"
-      ],
-      "attention_dropout": 0.0,
-      "bad_words_ids": null,
-      "begin_suppress_tokens": null,
-      "bos_token_id": 151643,
-      "chunk_size_feed_forward": 0,
-      "cross_attention_hidden_size": null,
-      "decoder_start_token_id": null,
-      "diversity_penalty": 0.0,
-      "do_sample": false,
-      "early_stopping": false,
-      "encoder_no_repeat_ngram_size": 0,
-      "eos_token_id": 151645,
-      "exponential_decay_length_penalty": null,
-      "finetuning_task": null,
-      "forced_bos_token_id": null,
-      "forced_eos_token_id": null,
-      "hidden_act": "silu",
-      "hidden_size": 1536,
-      "id2label": {
-        "0": "LABEL_0",
-        "1": "LABEL_1"
-      },
-      "image_token_id": null,
-      "initializer_range": 0.02,
-      "intermediate_size": 8960,
-      "is_decoder": false,
-      "is_encoder_decoder": false,
-      "label2id": {
-        "LABEL_0": 0,
-        "LABEL_1": 1
-      },
-      "length_penalty": 1.0,
-      "max_length": 20,
-      "max_position_embeddings": 32768,
-      "max_window_layers": 28,
-      "min_length": 0,
-      "model_type": "qwen2_vl_text",
-      "no_repeat_ngram_size": 0,
-      "num_attention_heads": 12,
-      "num_beam_groups": 1,
-      "num_beams": 1,
-      "num_hidden_layers": 28,
-      "num_key_value_heads": 2,
-      "num_return_sequences": 1,
-      "output_attentions": false,
-      "output_hidden_states": false,
-      "output_scores": false,
-      "pad_token_id": null,
-      "prefix": null,
-      "problem_type": null,
-      "pruned_heads": {},
-      "remove_invalid_values": false,
-      "repetition_penalty": 1.0,
-      "return_dict": true,
-      "return_dict_in_generate": false,
-      "rms_norm_eps": 1e-06,
-      "rope_scaling": {
-        "mrope_section": [
-          16,
-          24,
-          24
-        ],
-        "rope_type": "default",
-        "type": "default"
-      },
-      "rope_theta": 1000000.0,
-      "sep_token_id": null,
-      "sliding_window": 32768,
-      "suppress_tokens": null,
-      "task_specific_params": null,
-      "temperature": 1.0,
-      "tf_legacy_loss": false,
-      "tie_encoder_decoder": false,
-      "tie_word_embeddings": true,
-      "tokenizer_class": null,
-      "top_k": 50,
-      "top_p": 1.0,
-      "torch_dtype": "bfloat16",
-      "torchscript": false,
-      "typical_p": 1.0,
-      "use_bfloat16": false,
-      "use_cache": true,
-      "use_sliding_window": false,
-      "video_token_id": null,
-      "vision_end_token_id": 151653,
-      "vision_start_token_id": 151652,
-      "vision_token_id": 151654,
-      "vocab_size": 151936
-    },
-    "torch_dtype": "bfloat16",
-    "use_cache": true,
-    "use_sliding_window": false,
-    "video_token_id": 151656,
-    "vision_config": {
-      "_name_or_path": "",
-      "add_cross_attention": false,
-      "architectures": null,
-      "bad_words_ids": null,
-      "begin_suppress_tokens": null,
-      "bos_token_id": null,
-      "chunk_size_feed_forward": 0,
-      "cross_attention_hidden_size": null,
-      "decoder_start_token_id": null,
-      "depth": 32,
-      "diversity_penalty": 0.0,
-      "do_sample": false,
-      "early_stopping": false,
-      "embed_dim": 1280,
-      "encoder_no_repeat_ngram_size": 0,
-      "eos_token_id": null,
-      "exponential_decay_length_penalty": null,
-      "finetuning_task": null,
-      "forced_bos_token_id": null,
-      "forced_eos_token_id": null,
-      "hidden_act": "quick_gelu",
-      "hidden_size": 1536,
-      "id2label": {
-        "0": "LABEL_0",
-        "1": "LABEL_1"
-      },
-      "in_channels": 3,
-      "in_chans": 3,
-      "initializer_range": 0.02,
-      "is_decoder": false,
-      "is_encoder_decoder": false,
-      "label2id": {
-        "LABEL_0": 0,
-        "LABEL_1": 1
-      },
-      "length_penalty": 1.0,
-      "max_length": 20,
-      "min_length": 0,
-      "mlp_ratio": 4,
-      "model_type": "qwen2_vl",
-      "no_repeat_ngram_size": 0,
-      "num_beam_groups": 1,
-      "num_beams": 1,
-      "num_heads": 16,
-      "num_return_sequences": 1,
-      "output_attentions": false,
-      "output_hidden_states": false,
-      "output_scores": false,
-      "pad_token_id": null,
-      "patch_size": 14,
-      "prefix": null,
-      "problem_type": null,
-      "pruned_heads": {},
-      "remove_invalid_values": false,
-      "repetition_penalty": 1.0,
-      "return_dict": true,
-      "return_dict_in_generate": false,
-      "sep_token_id": null,
-      "spatial_merge_size": 2,
-      "spatial_patch_size": 14,
-      "suppress_tokens": null,
-      "task_specific_params": null,
-      "temperature": 1.0,
-      "temporal_patch_size": 2,
-      "tf_legacy_loss": false,
-      "tie_encoder_decoder": false,
-      "tie_word_embeddings": true,
-      "tokenizer_class": null,
-      "top_k": 50,
-      "top_p": 1.0,
-      "torch_dtype": null,
-      "torchscript": false,
-      "typical_p": 1.0,
-      "use_bfloat16": false
-    },
-    "vision_end_token_id": 151653,
-    "vision_start_token_id": 151652,
-    "vision_token_id": 151654,
-    "vocab_size": 151936
-  }
+    "spatial_patch_size": 14
+  },
+  "vision_end_token_id": 151653,
+  "vision_start_token_id": 151652,
+  "vision_token_id": 151654,
+  "vocab_size": 151936
 }
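
The commit replaces the nested layout (model_type "colqwen2" wrapping an inner "vlm_config", written by transformers 4.53.0.dev0) with the flat Qwen2-VL-style config written by transformers 4.45.0.dev0. A minimal sanity-check sketch, assuming the transformers library is installed and that this repo's id is vidore/colqwen2-base (an inference from the "_name_or_path" in the removed text_config block, not something the diff states):

    # Hedged sketch: confirm the flattened config.json parses as a plain
    # Qwen2-VL config. The repo id below is an assumption, not from the diff.
    from transformers import AutoConfig

    cfg = AutoConfig.from_pretrained("vidore/colqwen2-base")
    print(cfg.model_type)                         # expected: "qwen2_vl"
    print(cfg.hidden_size)                        # expected: 1536
    print(cfg.vision_config.spatial_patch_size)   # expected: 14

With the flat layout, the checkpoint should again load through code paths that expect a plain Qwen2VLConfig, such as the colpali-engine ColQwen2 class, which subclasses the Qwen2-VL model rather than consuming the nested "vlm_config" format.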