OPEA · Safetensors · mllama · 4-bit precision · auto-round
cicdatopea committed (verified)
Commit c643066 · 1 parent: 332dc5a

Update config.json

Files changed (1): config.json (+3 -3)
config.json CHANGED
@@ -7,7 +7,7 @@
   "model_type": "mllama",
   "quantization_config": {
     "amp": true,
-    "autoround_version": "0.4.0.dev",
+    "autoround_version": "0.5.1",
     "batch_size": 1,
     "bits": 4,
     "data_type": "int",
@@ -26,7 +26,7 @@
     "scale_dtype": "torch.float16",
     "seqlen": 512,
     "sym": true,
-    "block_name_to_quantize": "language_model.model.layers"
+    "block_name_to_quantize": "language_model.model.layers,model.language_model.layers"
   },
   "text_config": {
     "_attn_implementation_autoset": false,
@@ -139,7 +139,7 @@
     "vocab_size": 128256
   },
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.46.1",
+  "transformers_version": "4.52.2",
   "vision_config": {
     "_attn_implementation_autoset": false,
     "_name_or_path": "",