kmfoda committed
Commit ed4dae4 · verified · Parent: 273c6f9

Run 4. Outer Step 0. Inner Step 3525.

Files changed (3)
  1. config.json +7 -7
  2. inner_optimizer.pt +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -1,23 +1,23 @@
 {
-  "_name_or_path": "distributed/llama-1b",
+  "_name_or_path": "kmfoda/llama-1b-bs2048-nodt-1_1",
   "architectures": [
     "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "block_list": [
-    4517192,
-    4517200,
-    4517207,
-    4517214,
-    4517220
+    4517396,
+    4517401,
+    4517407,
+    4517416,
+    4517420
   ],
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
   "hidden_size": 2048,
   "initializer_range": 0.02,
-  "inner_step": 3520,
+  "inner_step": 3525,
   "intermediate_size": 5632,
   "last_allreduce_block": 5517501,
   "max_position_embeddings": 2048,
inner_optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:7399255da3cb2cbe46719e4781181007345d9c8e4891f2bedb44d20d14dc1908
+oid sha256:ad3b719474bbd8ce6a715c126c0d18ecfcb17d44073bdd6188a31eb18fc258e9
 size 8800564012
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8cbbc034ed53bba45ef0b363f1cb6b0f5bfe4538dde27e7da71fbbcd09550ef5
+oid sha256:0f399f0241c0fea4caf2ba843edea854f35000cdea75bc8e57a4f42169f28212
 size 4400216536
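
Both binary files are stored through Git LFS, so the diffs above only change pointer files: the `oid sha256:` line is the SHA-256 digest of the actual payload and `size` is its byte length. A minimal sketch for checking a downloaded copy against the new `model.safetensors` pointer follows; the repo id is an assumption taken from `_name_or_path`, and the digest will only match if the file is downloaded at this exact revision.

```python
# Hedged sketch: verify a downloaded model.safetensors against the LFS pointer's oid.
import hashlib

from huggingface_hub import hf_hub_download

EXPECTED = "0f399f0241c0fea4caf2ba843edea854f35000cdea75bc8e57a4f42169f28212"  # new oid above

path = hf_hub_download(
    repo_id="kmfoda/llama-1b-bs2048-nodt-1_1",  # assumption, see the config.json diff
    filename="model.safetensors",
)

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1024 * 1024), b""):  # hash in 1 MiB chunks
        sha.update(chunk)

print(sha.hexdigest() == EXPECTED)  # True only if the local file matches this commit
```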