acezxn committed
Commit f52a773 · verified · 1 Parent(s): c7ed971

End of training

adapter_config.json CHANGED
@@ -22,17 +22,17 @@
     "embed_tokens"
   ],
   "peft_type": "LORA",
-  "r": 128,
+  "r": 32,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "gate_proj",
-    "up_proj",
-    "q_proj",
     "v_proj",
     "k_proj",
-    "down_proj",
-    "o_proj"
+    "o_proj",
+    "q_proj",
+    "up_proj",
+    "gate_proj",
+    "down_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:ff55eb06dc847329dbd325240231cdfb8b4917254715527d716ce77f9f35b90a
-size 2354106632
+oid sha256:b6aa973b9d983b70ad8b1d07b4e5d400e5cf9636a233b805168041476bdb8026
+size 1770573360
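
The new adapter_model.safetensors is about 1.77 GB, down from about 2.35 GB before. A quick way to check the saved LoRA rank in the file, assuming the usual PEFT naming where each lora_A weight has r as its first dimension:

from safetensors import safe_open

with safe_open("adapter_model.safetensors", framework="pt", device="cpu") as f:
    for name in f.keys():
        if "lora_A" in name:
            # first dimension of each lora_A weight should now be 32
            print(name, tuple(f.get_tensor(name).shape))
            break
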
runs/Mar16_16-43-17_213c2d861ca3/events.out.tfevents.1742143402.213c2d861ca3.314.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:99c5a527861e290d0a5f69f57004da1c20b2b35f38c2e5f56d44bcfa2f138352
+size 33490
runs/Mar16_16-52-22_213c2d861ca3/events.out.tfevents.1742143943.213c2d861ca3.3571.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d413c8cadc62b06a47d835aa442a914a7a740eabd301a152fe5b7d6e068f6f0e
+size 33490
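
The files added under runs/ are TensorBoard event logs written during training. A minimal sketch for inspecting them, assuming the tensorboard package is installed and that the run uses the tag names the Hugging Face Trainer typically logs (e.g. "train/loss"):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Mar16_16-52-22_213c2d861ca3")  # directory from this commit
ea.Reload()
print(ea.Tags()["scalars"])             # list available scalar tags
for event in ea.Scalars("train/loss"):  # "train/loss" is an assumed tag name
    print(event.step, event.value)
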
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:455cc28dec9e5071fc02301167d31fa1ab1dee7f1e92c249a66bfe2aff32ba47
+oid sha256:74013344b0295f5b76921f4f3acbf6c83815beba7621a947183b23f37364a439
 size 5688