MISHANM committed
Commit 5b2a2dc · verified · 1 parent: fd46698

Upload LlavaNextForConditionalGeneration

config.json CHANGED
@@ -3,7 +3,6 @@
   "architectures": [
     "LlavaNextForConditionalGeneration"
   ],
-  "ignore_index": -100,
   "image_grid_pinpoints": [
     [
       384,
@@ -117,6 +116,7 @@
   "image_seq_length": 576,
   "image_token_index": 49155,
   "model_type": "llava_next",
+  "multimodal_projector_bias": true,
   "projector_hidden_act": "gelu",
   "text_config": {
     "architectures": [
@@ -139,12 +139,12 @@
     "rms_norm_eps": 1e-05,
     "rope_theta": 300000,
     "tie_word_embeddings": true,
-    "torch_dtype": "bfloat16",
+    "torch_dtype": "float32",
     "vocab_size": 49156
   },
   "tie_word_embeddings": true,
   "torch_dtype": "float16",
-  "transformers_version": "4.46.1",
+  "transformers_version": "4.49.0",
   "use_image_newline_parameter": true,
   "vision_config": {
     "hidden_act": "gelu_pytorch_tanh",
@@ -155,7 +155,8 @@
     "model_type": "siglip_vision_model",
     "num_attention_heads": 16,
     "num_hidden_layers": 27,
-    "patch_size": 14
+    "patch_size": 14,
+    "torch_dtype": "float32"
   },
   "vision_feature_layer": [
     -24,
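For context, a minimal loading sketch consistent with the updated config, assuming transformers >= 4.49.0 (the new transformers_version); the repo id below is a hypothetical placeholder, since the commit does not name the repository:

import torch
from transformers import LlavaNextForConditionalGeneration, LlavaNextProcessor

model_id = "MISHANM/llava-next-model"  # hypothetical repo id, for illustration only

processor = LlavaNextProcessor.from_pretrained(model_id)
model = LlavaNextForConditionalGeneration.from_pretrained(
    model_id,
    torch_dtype=torch.float16,  # matches the top-level "torch_dtype": "float16"
    device_map="auto",
)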
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 0,
   "eos_token_id": 0,
   "pad_token_id": 0,
-  "transformers_version": "4.46.1"
+  "transformers_version": "4.49.0"
 }
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:af1f74113515848caaa35a3c76f38a53275e37823b6f94c0b3afa32fd07c70b5
-size 4984561224
+oid sha256:36d1e4bbb42ed061d6c0b962025759b8416be3f9b5e59d77fff83eebb674b477
+size 4998717000
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
 {
   "metadata": {
-    "total_size": 5936638080
+    "total_size": 5950793856
   },
   "weight_map": {
     "image_newline": "model-00001-of-00002.safetensors",