aiden200 committed
Commit 7f52d24 · verified · 1 parent: d6e9f56

Training in progress, step 25

adapter_config.json CHANGED
@@ -7,21 +7,20 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
-  "lora_alpha": 32,
+  "lora_alpha": 16,
   "lora_dropout": 0.05,
   "modules_to_save": [
     "connector",
     "mm_projector",
     "response_head",
-    "related_head",
     "lm_head",
     "informative_head",
     "relevance_head",
     "uncertainty_head"
   ],
   "peft_type": "LORA",
-  "r": 16,
+  "r": 8,
   "revision": null,
-  "target_modules": "model\\.layers.*(q_proj|k_proj|v_proj|o_proj|gate_proj|up_proj|down_proj)$",
+  "target_modules": "model\\.layers.*(q_proj|k_proj|v_proj|gate_proj)$",
   "task_type": "CAUSAL_LM"
 }
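For reference, a minimal sketch of how the post-commit adapter configuration could be reproduced with the PEFT library. The field values are taken directly from the diff above; the surrounding script (imports, variable name) is an assumption, not the author's actual training code.

# Sketch only: rebuilds the updated adapter_config.json via peft.
# Field values come from the diff; everything else is assumed.
from peft import LoraConfig

lora_config = LoraConfig(
    r=8,            # halved from 16 in this commit
    lora_alpha=16,  # halved from 32 in this commit
    lora_dropout=0.05,
    # Regex now matches only the q/k/v and gate projections;
    # o_proj, up_proj, and down_proj were dropped in this commit.
    target_modules=r"model\.layers.*(q_proj|k_proj|v_proj|gate_proj)$",
    modules_to_save=[
        "connector",
        "mm_projector",
        "response_head",  # "related_head" was removed in this commit
        "lm_head",
        "informative_head",
        "relevance_head",
        "uncertainty_head",
    ],
    task_type="CAUSAL_LM",
)

Halving the rank and narrowing the target-module regex shrinks the adapter, which matches the smaller safetensors file below.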
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:f4b105f8ba9adeb788677a2b0fc9f3480c4afb4a2fc1b943d0cce350b32bf647
-size 1204780872
+oid sha256:c1ade9748556e4d56886cccd85bf923f95e6bfecdeaab4d697308aa9a23a157c
+size 1140991056
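These entries are Git LFS pointer files rather than the weights themselves: version names the pointer spec, oid is the SHA-256 of the stored object, and size is its byte count (here the adapter drops from about 1.20 GB to about 1.14 GB, consistent with the smaller rank and reduced target-module set). A hedged sketch of checking a downloaded file against such a pointer follows; the file paths and helper names are hypothetical.

# Sketch: verify a downloaded LFS object against its pointer file.
# Parsing follows the three-line key/value format shown in the diff
# (version, oid, size); paths are hypothetical.
import hashlib
import os

def parse_lfs_pointer(pointer_path):
    """Parse a Git LFS pointer file into a dict of its key/value lines."""
    fields = {}
    with open(pointer_path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def verify(pointer_path, blob_path):
    """Return True if blob_path matches the pointer's sha256 oid and size."""
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].split(":", 1)[1]
    expected_size = int(fields["size"])
    sha = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            sha.update(chunk)
    return (sha.hexdigest() == expected_oid
            and os.path.getsize(blob_path) == expected_size)

# e.g. verify("adapter_model.safetensors.pointer", "adapter_model.safetensors")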
train.log CHANGED
The diff for this file is too large to render.
 
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9a8273c75b0094b53824030ce4878138db8805782a9abaa305b557281cfaf2bd
+oid sha256:5fe8b61c87f3a88ae769d377e2273c4fdb025ccd9b26bc8dd91440af90212fd2
 size 7672