cicdatopea committed on
Commit
7110879
·
verified ·
1 Parent(s): 514ab7c

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +3 -3
config.json CHANGED
@@ -9,10 +9,10 @@
9
  "quantization_config": {
10
  "act_bits": 32,
11
  "amp": true,
12
- "autoround_version": "0.4.6",
13
  "batch_size": 8,
14
  "bits": 4,
15
- "block_name_to_quantize": "language_model.model.layers",
16
  "damp_percent": 0.01,
17
  "data_type": "int",
18
  "desc_act": false,
@@ -52,7 +52,7 @@
52
  "vocab_size": 131072
53
  },
54
  "torch_dtype": "float16",
55
- "transformers_version": "4.50.0.dev0",
56
  "vision_config": {
57
  "attention_dropout": 0.0,
58
  "head_dim": 64,
 
9
  "quantization_config": {
10
  "act_bits": 32,
11
  "amp": true,
12
+ "autoround_version": "0.5.1",
13
  "batch_size": 8,
14
  "bits": 4,
15
+ "block_name_to_quantize": "language_model.model.layers,model.language_model.layers",
16
  "damp_percent": 0.01,
17
  "data_type": "int",
18
  "desc_act": false,
 
52
  "vocab_size": 131072
53
  },
54
  "torch_dtype": "float16",
55
+ "transformers_version": "4.52.2",
56
  "vision_config": {
57
  "attention_dropout": 0.0,
58
  "head_dim": 64,