sboshar committed on
Commit
ff82eaf
·
verified ·
1 Parent(s): 76b5a05

Upload hyperparams.json

Browse files
Files changed (1) hide show
  1. jax_model/hyperparams.json +28 -0
jax_model/hyperparams.json ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
{
  "alphabet_size": 75,
  "pad_token_id": 1,
  "mask_token_id": 2,
  "max_positions": 2048,
  "k_for_kmers": 3,
  "embed_scale": 1.0,
  "emb_layer_norm_before": false,
  "attention_heads": 16,
  "key_size": 32,
  "embed_dim": 512,
  "ffn_embed_dim": 2048,
  "num_layers": 12,
  "positional_embedding": null,
  "lm_head": "roberta",
  "add_bias_kv": false,
  "add_bias_ffn": false,
  "use_rotary_embedding": true,
  "ffn_activation_name": "swish",
  "use_glu_in_ffn": true,
  "mask_before_attention": false,
  "token_dropout": false,
  "masking_ratio": 0.0,
  "masking_prob": 0.0,
  "use_gradient_checkpointing": false,
  "embeddings_layers_to_save": [],
  "attention_maps_to_save": []
}