Jiqing committed on
Commit 46006e2 · 1 Parent(s): 3907684

Update config.json

Files changed (1)
  1. config.json (+25 -7)
config.json CHANGED
@@ -1,5 +1,4 @@
 {
-  "_name_or_path": "/home/jiqingfe/protst/ProtST-HuggingFace/hf_model/protst_model/",
   "architectures": [
     "ProtSTModel"
   ],
@@ -13,31 +12,50 @@
     ],
     "attention_probs_dropout_prob": 0.0,
     "classifier_dropout": null,
-    "cls_token_id": 0,
     "emb_layer_norm_before": true,
-    "eos_token_id": 2,
+    "esmfold_config": null,
     "hidden_act": "gelu",
     "hidden_dropout_prob": 0.0,
     "hidden_size": 1280,
+    "initializer_range": 0.02,
     "intermediate_size": 5120,
+    "is_folding_model": false,
     "layer_norm_eps": 1e-05,
     "mask_token_id": 32,
+    "max_position_embeddings": 1026,
     "model_type": "esm",
     "num_attention_heads": 20,
     "num_hidden_layers": 33,
+    "cls_token_id": 0,
     "pad_token_id": 1,
+    "eos_token_id": 2,
+    "position_embedding_type": "absolute",
     "token_dropout": true,
     "torch_dtype": "float32",
+    "use_cache": true,
+    "vocab_list": null,
     "vocab_size": 33
   },
   "text_config": {
     "architectures": [
       "BertForMaskedLM"
     ],
-    "cls_token_id": 2,
     "model_type": "bert",
-    "sep_token_id": 3
+    "attention_probs_dropout_prob": 0.1,
+    "hidden_act": "gelu",
+    "pad_token_id": 0,
+    "cls_token_id": 2,
+    "sep_token_id": 3,
+    "hidden_dropout_prob": 0.1,
+    "hidden_size": 768,
+    "initializer_range": 0.02,
+    "intermediate_size": 3072,
+    "max_position_embeddings": 512,
+    "num_attention_heads": 12,
+    "num_hidden_layers": 12,
+    "type_vocab_size": 2,
+    "vocab_size": 30522
   },
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.35.0.dev0"
+  "torch_dtype": "float32",
+  "transformers_version": "4.37.0.dev0"
 }
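For reference, a minimal Python sketch (not part of this commit) that checks a local copy of the updated config.json against the layout introduced here. The file path "config.json" is an assumption, and the key under which the ESM protein sub-config is stored falls outside the hunks shown above, so the sketch locates it by its "model_type" instead of hard-coding a name; all asserted values are taken directly from the diff.

import json

# Point this at a local copy of config.json from this revision; the path is an assumption.
with open("config.json") as f:
    cfg = json.load(f)

# The BERT text encoder is now spelled out explicitly under "text_config".
text_cfg = cfg["text_config"]
assert text_cfg["model_type"] == "bert"
assert text_cfg["hidden_size"] == 768 and text_cfg["vocab_size"] == 30522
assert text_cfg["cls_token_id"] == 2 and text_cfg["sep_token_id"] == 3

# The key holding the ESM protein sub-config is not visible in the hunks above,
# so locate it by its "model_type" rather than assuming a key name.
esm_cfg = next(v for v in cfg.values()
               if isinstance(v, dict) and v.get("model_type") == "esm")
assert esm_cfg["hidden_size"] == 1280 and esm_cfg["num_hidden_layers"] == 33
# The special-token ids moved from the top level into the protein sub-config.
assert esm_cfg["cls_token_id"] == 0 and esm_cfg["eos_token_id"] == 2

# The serialized dtype changed from bfloat16 to float32.
assert cfg["torch_dtype"] == "float32"
print("config.json matches the layout introduced in commit 46006e2")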