kazemnejad committed
Commit 33aef7e · verified · 1 Parent(s): 86c21b5

Update config.json

Files changed (1)
  1. config.json +2 -21
config.json CHANGED
@@ -9,9 +9,6 @@
       "projector_type": "mlp_gelu"
     }
   },
-  "architectures": [
-    "MultiModalityCausalLM"
-  ],
   "gen_aligner_config": {
     "cls": "MlpProjector",
     "model_type": "gen_aligner",
@@ -40,31 +37,15 @@
     }
   },
   "language_config": {
-    "_attn_implementation_autoset": true,
-    "attention_bias": false,
-    "attention_dropout": 0.0,
-    "head_dim": 128,
-    "hidden_act": "silu",
-    "hidden_size": 4096,
-    "initializer_range": 0.02,
-    "intermediate_size": 11008,
     "max_position_embeddings": 16384,
-    "mlp_bias": false,
     "model_type": "llama",
-    "num_attention_heads": 32,
     "num_hidden_layers": 30,
-    "num_key_value_heads": 32,
-    "pretraining_tp": 1,
-    "rms_norm_eps": 1e-06,
-    "rope_scaling": null,
-    "rope_theta": 10000.0,
     "torch_dtype": "bfloat16",
-    "use_cache": true,
     "vocab_size": 102400
   },
   "model_type": "multi_modality",
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.51.3",
+  "transformers_version": "4.33.1",
   "vision_config": {
     "cls": "CLIPVisionTower",
     "model_type": "vision",
@@ -75,4 +56,4 @@
       "select_layer": -1
     }
   }
-}
+}
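The net change: the top-level "architectures" list is dropped, "transformers_version" is rolled back from 4.51.3 to 4.33.1, and "language_config" is trimmed to the five keys whose values differ from stock LlamaConfig defaults (the final "}" is rewritten with no visible textual difference, presumably trailing whitespace or a newline). Below is a minimal sketch, under my assumption — not stated in the commit — that the deleted language_config keys are meant to fall back to transformers' LlamaConfig defaults; the repository id is not shown on this page, so the snippet rebuilds the trimmed sub-config from the literal dict rather than via from_pretrained.

```python
# Sketch: rebuild the trimmed "language_config" and check that
# LlamaConfig defaults reproduce the values this commit deleted.
# Assumption: the pruning relies on transformers' built-in defaults.
from transformers import LlamaConfig

# The five keys the commit keeps in "language_config".
trimmed = {
    "max_position_embeddings": 16384,
    "model_type": "llama",
    "num_hidden_layers": 30,
    "torch_dtype": "bfloat16",
    "vocab_size": 102400,
}

cfg = LlamaConfig(**trimmed)

# Each assert corresponds to a key the commit removed from config.json.
assert cfg.hidden_act == "silu"
assert cfg.hidden_size == 4096
assert cfg.initializer_range == 0.02
assert cfg.intermediate_size == 11008
assert cfg.num_attention_heads == 32
assert cfg.num_key_value_heads == 32  # defaults to num_attention_heads
assert cfg.rms_norm_eps == 1e-06
assert cfg.rope_scaling is None
assert cfg.rope_theta == 10000.0
assert cfg.use_cache is True
```

Read together with the version pin, the pruning looks like a compatibility rollback toward older transformers releases (keys such as "head_dim", "mlp_bias", and "_attn_implementation_autoset" are only emitted by newer library versions), but the one-line commit message leaves the motivation unstated.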