amd /

Safetensors · llama · alignment-handbook · Generated from Trainer
Mingyuyang-1 committed
Commit 66c9adc · verified · 1 Parent(s): 3282f04

Update hybrid_config.json

Files changed (1): hybrid_config.json (+0 -4)
hybrid_config.json CHANGED
@@ -15,14 +15,10 @@
  "kv_lora_rank": 128,
  "q_lora_rank": 1344,
  "use_lora_layer_norm": false,
- "use_fixed_rank_for_first_and_last_block": true,
  "use_full_kv_head": false,
- "layer_rank_list": {},
  "qk_rope_head_dim": 32,
  "v_head_dim": 64,
  "qk_nope_head_dim": 32,
- "q_energy_ratio": null,
- "kv_energy_ratio": null,
  "qkv_rank_divisor": 8,
  "max_position_embeddings": 131072,
  "rope_theta": 500000.0,
 