GregSamek committed on
Commit
5a36696
1 Parent(s): a97c29d

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +17 -8
config.json CHANGED
@@ -1,9 +1,18 @@
1
- {
2
- "vocab_size": 8192,
3
- "context_length": 128,
4
- "d_embedding": 512,
5
- "d_intermediate": 2048,
6
- "n_heads": 16,
7
- "n_layers": 16,
8
- "qkv_bias": false
 
 
 
 
 
 
 
 
 
9
  }
 
1
+ {
2
+ "model": {
3
+ "vocab_size": 8192,
4
+ "context_length": 128,
5
+ "d_embedding": 512,
6
+ "d_intermediate": 2048,
7
+ "n_heads": 16,
8
+ "n_layers": 16,
9
+ "qkv_bias": false
10
+ },
11
+ "train": {
12
+ "peak_lr": 0.001,
13
+ "warmup_ratio": 0.01,
14
+ "n_epochs": 2,
15
+ "batch_size": 8,
16
+ "weight_decay": 0.1
17
+ }
18
  }