FaeMo committed
Commit 23587cf (verified) · Parent: 426e3ed

Training in progress, step 1200

adapter_config.json CHANGED
@@ -23,13 +23,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
     "down_proj",
+    "gate_proj",
+    "up_proj",
     "k_proj",
-    "o_proj",
+    "v_proj",
     "q_proj",
-    "gate_proj",
-    "v_proj"
+    "o_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a8568687009556a5dfb29651378fdf075a63dcafe0d0d9d34b5a4e01b593bf84
+oid sha256:abb73cd05606c28edafc2fc9c0aae6fd30ee0e1a4aa714a40769143be6f9f14f
 size 167832240
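This file, like the remaining ones in the commit, is a Git LFS pointer: the repository tracks only a version line, the SHA-256 oid of the blob, and its size in bytes, while the binary lives in LFS storage. A minimal sketch of verifying a pointer against a local checkout (assuming the blob has been fetched with `git lfs pull`):

import hashlib

# The oid in an LFS pointer is the SHA-256 of the file's full contents.
def lfs_oid(path: str, chunk_size: int = 1 << 20) -> str:
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# Should print the new oid from the diff above:
# abb73cd05606c28edafc2fc9c0aae6fd30ee0e1a4aa714a40769143be6f9f14f
print(lfs_oid("adapter_model.safetensors"))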
runs/Feb28_01-01-01_gpu010/events.out.tfevents.1740725012.gpu010.12482.0 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:cd1e7333d653dfff5267ff2d04714d82be278b6314741a771ebfd20e4ba8b625
-size 53286
+oid sha256:1bddf24a88f72f634d862547c9ce841450587d24cf91af2e16ada7c1b43c1cb8
+size 53708
runs/Feb28_08-20-18_gpu039/events.out.tfevents.1740748941.gpu039.14500.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cfd8967663ff53ca8b889c9ccfe22395f9cb3fe4579d932127e75e6ffc68f5da
+size 10979
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c8a78117f51d132d902507c9b2d592239e829aae342a464662c7d1f1eb4eebf4
+oid sha256:2a3179145356394581957842d807d40d168b79beedee91fbb3287e211cc1cd5e
 size 6456