Alphatao committed
Commit c00a047 · verified · 1 parent: 68d3b9d

Training in progress, step 250

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "unsloth/Mistral-Nemo-Base-2407",
+  "base_model_name_or_path": null,
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -25,12 +25,12 @@
   "revision": null,
   "target_modules": [
     "down_proj",
-    "k_proj",
+    "q_proj",
     "v_proj",
     "o_proj",
     "up_proj",
     "gate_proj",
-    "q_proj"
+    "k_proj"
   ],
   "task_type": "CAUSAL_LM",
   "trainable_token_indices": null,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e6fb2c47944fd2f4753270ee8c6d1227502fbbae8e6e3288d2bf4ea334ad6b88
+oid sha256:77641736a27f80bd67a38f5a5b434ef99b73f4b62be8649193011f1b341f1186
 size 228150120
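
adapter_model.safetensors, training_args.bin, and the TensorBoard event file below are stored via Git LFS, so the diff shows only the updated pointer (sha256 oid and byte size) rather than file contents; the adapter weights remain 228150120 bytes, consistent with a newer checkpoint of the same adapter shape.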
runs/May14_05-03-58_3b9dbe64873a/events.out.tfevents.1747199046.3b9dbe64873a.317.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:084f5570d79cca348a0b9dbd2ac611263c503835a93964762b36d791ac404a41
+size 438539
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:226b438e87695b85d63c151ba818b85c53ddf1a754680309eee987646ce50f52
+oid sha256:bf614dd059fe2b87171be03ba21b46b2879c5f23250471c5eeae3ff12b961059
 size 7864