aiden200 committed on
Commit 3f6c68d · verified · 1 Parent(s): 284aab5

Upload adapter_config.json with huggingface_hub

Files changed (1)
  adapter_config.json +3 -3
adapter_config.json CHANGED
@@ -7,7 +7,7 @@
   "init_lora_weights": true,
   "layers_pattern": null,
   "layers_to_transform": null,
- "lora_alpha": 16,
+ "lora_alpha": 32,
   "lora_dropout": 0.05,
   "modules_to_save": [
   "connector",
@@ -19,8 +19,8 @@
   "uncertainty_head"
   ],
   "peft_type": "LORA",
- "r": 8,
+ "r": 16,
   "revision": null,
- "target_modules": "model\\.layers.*(q_proj|k_proj|v_proj|gate_proj)$",
+ "target_modules": "model\\.layers.*(q_proj|k_proj|v_proj|o_proj|gate_proj|up_proj|down_proj)$",
   "task_type": "CAUSAL_LM"
   }
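
For reference, a minimal sketch of the PEFT LoraConfig that the updated adapter_config.json corresponds to. The base model is not shown here, and the diff elides the middle entries of modules_to_save (only "connector" and "uncertainty_head" are visible), so the list below is intentionally incomplete.

# Sketch of the updated adapter config, assuming the peft library.
from peft import LoraConfig

config = LoraConfig(
    r=16,                  # rank doubled from 8
    lora_alpha=32,         # alpha doubled from 16, keeping alpha/r = 2
    lora_dropout=0.05,
    init_lora_weights=True,
    # A string target_modules is treated as a regex and matched against
    # module names, so LoRA now also covers o_proj, up_proj and down_proj.
    target_modules=r"model\.layers.*(q_proj|k_proj|v_proj|o_proj|gate_proj|up_proj|down_proj)$",
    # Truncated: the diff only shows the first and last entries of this list.
    modules_to_save=["connector", "uncertainty_head"],
    task_type="CAUSAL_LM",
)

Doubling r and lora_alpha together leaves the effective LoRA scaling factor alpha/r at 2, so the change mainly raises adapter capacity rather than rescaling updates; widening target_modules from four projections to seven extends adaptation to the attention output (o_proj) and the remaining MLP projections (up_proj, down_proj).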