FaeMo committed on
Commit acb30cd · verified · Parent: 4dec507

Training in progress, step 7900

adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "v_proj",
-    "up_proj",
     "q_proj",
-    "gate_proj",
+    "o_proj",
+    "up_proj",
     "down_proj",
+    "v_proj",
     "k_proj",
-    "o_proj"
+    "gate_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ee0a30bf7667e4066256dcf619aecac97943f48876f8ffc5c2403d99571bc73a
+oid sha256:7bb29e0f071a93a6a9e0c67107296d24b05994fb2300230aff265ea31b681855
 size 167832240
runs/Mar03_16-26-43_gpu004/events.out.tfevents.1741037337.gpu004.4332.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c6fb64747d40cc60daa16b139d187e3f7eb8e2785542e2f9513f8a982e107be4
-size 78499
+oid sha256:8650e43cc448839fd64db8f159c23f4543e763c78004072bf209993ef47283df
+size 80609
runs/Mar04_00-31-30_gpu004/events.out.tfevents.1741066317.gpu004.17065.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:606a6eb9e9f6813b307ba6d7dbd900d76105d7b6b58ecede727afc2e562bbc7e
+size 10979
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d5d7b62620cc2212e76e64a8a1374c09bd90b4c425305d61f51234fe37f7b295
+oid sha256:b499f106a652f74fd7dd4d873ed049c9f32f77ad358c09ec715dffa285743802
 size 6456