RichardErkhov committed
Commit 05a62ab · verified · 1 Parent(s): 2301bb9

uploaded model

Files changed (1)
1. config.json +160 -0
config.json ADDED
@@ -0,0 +1,160 @@
+ {
+   "_name_or_path": "Phi-3.5-vision-instruct_20240915_223241",
+   "architectures": [
+     "Phi3VForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "auto_map": {
+     "AutoConfig": "configuration_phi3_v.Phi3VConfig",
+     "AutoModelForCausalLM": "microsoft/Phi-3.5-vision-instruct--modeling_phi3_v.Phi3VForCausalLM"
+   },
+   "bos_token_id": 1,
+   "embd_layer": {
+     "embedding_cls": "image",
+     "hd_transform_order": "sub_glb",
+     "projection_cls": "mlp",
+     "use_hd_transform": true,
+     "with_learnable_separator": true
+   },
+   "embd_pdrop": 0.0,
+   "eos_token_id": 2,
+   "hidden_act": "silu",
+   "hidden_size": 3072,
+   "img_processor": {
+     "image_dim_out": 1024,
+     "model_name": "openai/clip-vit-large-patch14-336",
+     "name": "clip_vision_model",
+     "num_img_tokens": 144
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 8192,
+   "max_position_embeddings": 131072,
+   "model_type": "phi3_v",
+   "num_attention_heads": 32,
+   "num_hidden_layers": 32,
+   "num_key_value_heads": 32,
+   "original_max_position_embeddings": 4096,
+   "pad_token_id": 32000,
+   "quantization_config": {
+     "bits": 4,
+     "group_size": 128,
+     "modules_to_not_convert": [
+       "vision_embed_tokens"
+     ],
+     "quant_method": "awq",
+     "version": "gemm",
+     "zero_point": true
+   },
+   "resid_pdrop": 0.0,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": {
+     "long_factor": [
+       1.0800000429153442,
+       1.1100000143051147,
+       1.1399999856948853,
+       1.340000033378601,
+       1.5899999141693115,
+       1.600000023841858,
+       1.6200000047683716,
+       2.620000123977661,
+       3.2300000190734863,
+       3.2300000190734863,
+       4.789999961853027,
+       7.400000095367432,
+       7.700000286102295,
+       9.09000015258789,
+       12.199999809265137,
+       17.670000076293945,
+       24.46000099182129,
+       28.57000160217285,
+       30.420001983642578,
+       30.840002059936523,
+       32.590003967285156,
+       32.93000411987305,
+       42.320003509521484,
+       44.96000289916992,
+       50.340003967285156,
+       50.45000457763672,
+       57.55000305175781,
+       57.93000411987305,
+       58.21000289916992,
+       60.1400032043457,
+       62.61000442504883,
+       62.62000274658203,
+       62.71000289916992,
+       63.1400032043457,
+       63.1400032043457,
+       63.77000427246094,
+       63.93000411987305,
+       63.96000289916992,
+       63.970001220703125,
+       64.02999877929688,
+       64.06999969482422,
+       64.08000183105469,
+       64.12000274658203,
+       64.41000366210938,
+       64.4800033569336,
+       64.51000213623047,
+       64.52999877929688,
+       64.83999633789062
+     ],
+     "short_factor": [
+       1.08,
+       1.1,
+       1.1300000000000001,
+       1.2800000000000002,
+       1.3100000000000003,
+       1.4500000000000004,
+       1.4500000000000004,
+       1.9500000000000008,
+       2.030000000000001,
+       2.4299999999999926,
+       2.5699999999999896,
+       2.9499999999999815,
+       3.729999999999965,
+       3.869999999999962,
+       4.189999999999955,
+       4.43999999999995,
+       4.6399999999999455,
+       4.979999999999938,
+       5.159999999999934,
+       5.279999999999932,
+       5.759999999999922,
+       5.889999999999919,
+       5.889999999999919,
+       5.969999999999917,
+       6.089999999999915,
+       6.2799999999999105,
+       6.7699999999999,
+       6.8899999999998975,
+       7.109999999999893,
+       7.129999999999892,
+       7.179999999999891,
+       7.289999999999889,
+       7.339999999999888,
+       7.559999999999883,
+       7.619999999999882,
+       7.69999999999988,
+       7.879999999999876,
+       7.879999999999876,
+       7.879999999999876,
+       7.939999999999875,
+       7.949999999999875,
+       7.979999999999874,
+       8.19999999999987,
+       8.439999999999864,
+       8.469999999999864,
+       8.589999999999861,
+       8.809999999999857,
+       8.999999999999853
+     ],
+     "type": "su"
+   },
+   "rope_theta": 10000.0,
+   "sliding_window": 262144,
+   "tie_word_embeddings": false,
+   "torch_dtype": "float16",
+   "transformers_version": "4.47.1",
+   "use_cache": false,
+   "vocab_size": 32064
+ }
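
For reference, a minimal loading sketch under stated assumptions: the repository id below is a placeholder for this upload, a CUDA device and the autoawq package are available, and trust_remote_code is acceptable (it is required because "auto_map" routes AutoConfig / AutoModelForCausalLM to the custom phi3_v modeling code).

from transformers import AutoModelForCausalLM, AutoProcessor

# Placeholder repository id; substitute the actual repo path for this upload.
repo_id = "RichardErkhov/Phi-3.5-vision-instruct-awq"

# trust_remote_code pulls in the custom phi3_v implementation named in "auto_map";
# the 4-bit weights declared under "quantization_config" are loaded by transformers
# through its AWQ integration (autoawq must be installed).
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,
    device_map="cuda",
    torch_dtype="auto",  # the config specifies "torch_dtype": "float16"
)

# The processor wraps the tokenizer together with the CLIP ViT-L/14-336 image
# tower referenced by "img_processor" in the config.
processor = AutoProcessor.from_pretrained(repo_id, trust_remote_code=True)

Note the quantization settings: "modules_to_not_convert": ["vision_embed_tokens"] keeps the vision embedding path unquantized, while the language-model weights are stored as 4-bit AWQ with group size 128, zero-point enabled, and the GEMM kernel version.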