ashabrawy committed · Commit 3c6128a · verified · 1 Parent(s): 1387f16

jais-family-590m_gamma_0.0_0_lamda_0.0-0_English_sft/
README.md CHANGED
@@ -27,7 +27,7 @@ print(output["generated_text"])
 
 ## Training procedure
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/afz225/uncategorized/runs/vu02b53c)
+[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/afz225/uncategorized/runs/41ghlxmz)
 
 This model was trained with SFT.
 
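The README hunk's context line (`print(output["generated_text"])`) and the SFT note indicate the adapter is meant to be loaded for causal-LM generation. Below is a minimal sketch of one way to do that with `peft`; both repo ids are illustrative assumptions, not confirmed by this commit.

```python
# Hedged sketch: loading this SFT LoRA adapter for text generation.
# Both repo ids below are assumptions for illustration, not taken from this commit.
from transformers import AutoTokenizer
from peft import AutoPeftModelForCausalLM

ADAPTER_ID = "ashabrawy/jais-family-590m_gamma_0.0_0_lamda_0.0-0_English_sft"  # hypothetical
BASE_ID = "inceptionai/jais-family-590m"  # assumed base model for the tokenizer

# Jais checkpoints ship custom modeling code, so trust_remote_code is likely needed.
model = AutoPeftModelForCausalLM.from_pretrained(ADAPTER_ID, trust_remote_code=True)
tokenizer = AutoTokenizer.from_pretrained(BASE_ID, trust_remote_code=True)

inputs = tokenizer("Write a one-sentence greeting.", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=40)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```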
adapter_config.json CHANGED
@@ -23,14 +23,14 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
+    "up_proj",
+    "k_proj",
     "o_proj",
-    "lm_head",
-    "gate_proj",
-    "v_proj",
     "q_proj",
-    "up_proj",
-    "k_proj"
+    "v_proj",
+    "gate_proj",
+    "down_proj",
+    "lm_head"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
global_step220/zero_pp_rank_0_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f958a8aec2341e4e6912df4f0fa282b7e3ad7ae3bfe0eba21d000d12edd80776
+size 160351592

global_step220/zero_pp_rank_0_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:82305266487788a54b016d0e85a1e2bf932901a2b3f763d602577918494c3f38
+size 2080173

global_step220/zero_pp_rank_1_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88dbdaf472146625937cca8d555e21ff19f67e96a10686f3749ce8802b0f4470
+size 160351464

global_step220/zero_pp_rank_1_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0c9788b130fadb2cd48fc7a13800b36859e10a515facfadd5aea468464ff039e
+size 2080173

global_step220/zero_pp_rank_2_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6ac04269b2b8ac9998096d4bd21d1cc88a41d2af417fdbc616009f168d4c00cd
+size 160351464

global_step220/zero_pp_rank_2_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b5b5f1201fe61a21c935736b6479c538bf33492266585c03681ba6aa986fa60e
+size 2080173

global_step220/zero_pp_rank_3_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:66a2798383469d5bc69ddae9ac4cd003c347bea4e3f4f043b685c56b5471b916
+size 160351464

global_step220/zero_pp_rank_3_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:055f2cbd58b8d13b7423701d017ca4013c249be901d29d071383f5d42bc1cb19
+size 2080173

global_step220/zero_pp_rank_4_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:28675dbb3b0501e23b782dbf67aa12c070935cf93dc31cf716a882d5344af6ae
+size 160351464

global_step220/zero_pp_rank_4_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:108791123ba4518cdc5ee0ac1802bb2bbd7789d9bb2af8fb49b5c426de026a07
+size 2080173

global_step220/zero_pp_rank_5_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ac0ad61d49a3f926198a7df5d80470d16b76a02aaef8fc51d5fb8948daa2d662
+size 160351464

global_step220/zero_pp_rank_5_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aae336eb42aeba5dcaf7731c409a50b1a5cf12b042723ae361ab401c21e63017
+size 2080173

global_step220/zero_pp_rank_6_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a3a624ad715f8aa955c0d66c7ba6ac298a9116d8bf87eeeaa4a52710711f53ed
+size 160351464

global_step220/zero_pp_rank_6_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a056b429cf91f3a13d3362cf5b877ee0267766320c2e12682c6522f30023113
+size 2080173

global_step220/zero_pp_rank_7_mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:df8c5207fd7c883cc01ec2ec5ffc69db4b75acb02979c6405117e7e7fccf29cf
+size 160351464

global_step220/zero_pp_rank_7_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1fa269a9865f6212ce703e5e66af09ce9489bd5fcf7983ef776aa9a1813cd87d
+size 2080173
latest CHANGED
@@ -1 +1 @@
-global_step132
+global_step220
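The files above are DeepSpeed ZeRO checkpoint shards (model and optimizer partitions for eight data-parallel ranks) at step 220, and `latest` now points at that tag. A minimal sketch of consolidating such shards into a single fp32 state dict with DeepSpeed's standard `zero_to_fp32` utility follows; the local paths are assumptions about where the repo is checked out.

```python
# Hedged sketch: merging the ZeRO-partitioned shards under global_step220 into
# one fp32 state dict. Paths are assumptions; run from a local clone of the repo.
import torch
from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint

CHECKPOINT_DIR = "."  # directory containing the `latest` file and global_step220/
state_dict = get_fp32_state_dict_from_zero_checkpoint(CHECKPOINT_DIR, tag="global_step220")
torch.save(state_dict, "consolidated_fp32.pt")  # illustrative output file
```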
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:955bf9d747a554fd4cf098450c841b257e20b71d16648ef4e61570eff7164623
+oid sha256:fea745b628a3dd3527df98d769e6c41abaea9f9ece620f4da14fa89c11f1e2bc
 size 6968