Yin-Xie committed on
Commit 724090d · verified · 1 Parent(s): a38b332

Upload config.json with huggingface_hub

Files changed (1)
  1. config.json +139 -0
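As the commit message notes, the file was pushed with the huggingface_hub client. The following is a minimal sketch of how such a single-file upload is typically made; the repo id and local path are placeholders and assumptions, not values shown on this page.

# Hypothetical upload sketch; repo_id and the local path are assumed.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token saved by `huggingface-cli login`
api.upload_file(
    path_or_fileobj="./checkpoints/mlcd-embodied-pretrain/config.json",  # assumed local path
    path_in_repo="config.json",                # destination filename inside the repo
    repo_id="your-username/your-model-repo",   # placeholder repo id
    repo_type="model",
    commit_message="Upload config.json with huggingface_hub",
)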
config.json ADDED
@@ -0,0 +1,139 @@
+{
+  "_name_or_path": "./checkpoints/mlcd-embodied-pretrain",
+  "add_faster_video": false,
+  "add_time_instruction": false,
+  "architectures": [
+    "LlavaQwenForCausalLM"
+  ],
+  "attention_dropout": 0.0,
+  "bos_token_id": 151643,
+  "eos_token_id": 151645,
+  "faster_token_stride": 10,
+  "force_sample": false,
+  "hidden_act": "silu",
+  "hidden_size": 3584,
+  "image_aspect_ratio": "anyres",
+  "image_crop_resolution": null,
+  "image_grid_pinpoints": [
+    [
+      336,
+      336
+    ],
+    [
+      336,
+      672
+    ],
+    [
+      336,
+      1008
+    ],
+    [
+      336,
+      1344
+    ],
+    [
+      336,
+      1680
+    ],
+    [
+      336,
+      2016
+    ],
+    [
+      672,
+      336
+    ],
+    [
+      672,
+      672
+    ],
+    [
+      672,
+      1008
+    ],
+    [
+      672,
+      1344
+    ],
+    [
+      672,
+      1680
+    ],
+    [
+      1008,
+      336
+    ],
+    [
+      1008,
+      672
+    ],
+    [
+      1008,
+      1008
+    ],
+    [
+      1008,
+      1344
+    ],
+    [
+      1008,
+      1680
+    ],
+    [
+      1344,
+      336
+    ],
+    [
+      1344,
+      672
+    ],
+    [
+      1344,
+      1008
+    ],
+    [
+      1344,
+      1344
+    ]
+  ],
+  "image_split_resolution": null,
+  "initializer_range": 0.02,
+  "intermediate_size": 18944,
+  "max_position_embeddings": 32768,
+  "max_window_layers": 28,
+  "mm_hidden_size": 1024,
+  "mm_newline_position": "grid",
+  "mm_patch_merge_type": "spatial_unpad",
+  "mm_projector_lr": null,
+  "mm_projector_type": "mlp2x_gelu",
+  "mm_resampler_type": null,
+  "mm_spatial_pool_mode": "bilinear",
+  "mm_spatial_pool_stride": null,
+  "mm_tunable_parts": "mm_vision_tower,mm_mlp_adapter,mm_language_model",
+  "mm_use_im_patch_token": false,
+  "mm_use_im_start_end": false,
+  "mm_vision_select_feature": "patch",
+  "mm_vision_select_layer": -2,
+  "mm_vision_tower": "DeepGlint-AI/mlcd-vit-large-patch14-336",
+  "mm_vision_tower_lr": 2e-06,
+  "model_type": "qwen2",
+  "num_attention_heads": 28,
+  "num_hidden_layers": 28,
+  "num_key_value_heads": 4,
+  "pos_skipping_range": 4096,
+  "rms_norm_eps": 1e-06,
+  "rope_scaling": null,
+  "rope_theta": 1000000.0,
+  "sliding_window": null,
+  "tie_word_embeddings": false,
+  "tokenizer_model_max_length": 13768,
+  "tokenizer_padding_side": "right",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.44.2",
+  "use_cache": true,
+  "use_mm_proj": true,
+  "use_pos_skipping": false,
+  "use_sliding_window": false,
+  "vision_tower_pretrained": null,
+  "vocab_size": 152064
+}
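Because "model_type" is "qwen2", this file should parse with transformers' AutoConfig as a Qwen2Config; keys that Qwen2Config does not define (the mm_* and image_* multimodal fields) are kept as plain attributes on the config object. The sketch below only inspects the config; the LlavaQwenForCausalLM architecture comes from the LLaVA-NeXT codebase and is not a stock transformers class, so model loading is a separate step. The repo id is a placeholder for the repository this commit belongs to.

# Inspection sketch; "your-username/your-model-repo" is a placeholder repo id.
from transformers import AutoConfig

cfg = AutoConfig.from_pretrained("your-username/your-model-repo")

# Core Qwen2 language-model shape.
print(cfg.model_type, cfg.hidden_size, cfg.num_hidden_layers, cfg.num_attention_heads)

# Multimodal keys are not part of Qwen2Config proper, but unrecognized keys in
# config.json are attached as attributes, so they stay accessible:
print(cfg.mm_vision_tower)    # "DeepGlint-AI/mlcd-vit-large-patch14-336"
print(cfg.mm_projector_type)  # "mlp2x_gelu"
print(len(cfg.image_grid_pinpoints), "anyres grid pinpoints")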