FaeMo committed
Commit 2a1914f · verified · 1 Parent(s): 4e91582

Training in progress, step 3000

adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "down_proj",
-    "gate_proj",
     "up_proj",
-    "k_proj",
-    "v_proj",
     "q_proj",
-    "o_proj"
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "down_proj",
+    "v_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:af65bbcbed595cfdd515710943d059066090a9e2ffea505bde05d2ee01088300
+oid sha256:2b1afbdb78ca035364bf77e43579ef46ad75a157db7bdcd605aded920d4c44b4
 size 167832240
runs/Feb28_08-20-18_gpu039/events.out.tfevents.1740748941.gpu039.14500.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:98e93de24edb3a6f90b462e910c18c11b4a79a8909d6aaae540aabc5897335b7
-size 78499
+oid sha256:13ef91e6d8e1cc9b1f3e768957d4742554c8286b6f1cf232bb7371fb7930903c
+size 80820
runs/Feb28_16-32-38_gpu004/events.out.tfevents.1740778463.gpu004.29439.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce1a1f1e2f5d1564ea93adc193949f1ede7d818eac6ee7688f0f02d4e90db176
+size 15199
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:2a3179145356394581957842d807d40d168b79beedee91fbb3287e211cc1cd5e
+oid sha256:489631830857fb9b30f4af58def02d6abcbb877b0fd325f91a8795d94ec8de99
 size 6456