Mingyuyang-1 committed · Commit 3282f04 · verified · 1 Parent(s): bcbfa41

Update hybrid_config.json

Files changed (1):
  1. hybrid_config.json +1 -4
hybrid_config.json CHANGED
@@ -28,14 +28,11 @@
   "rope_theta": 500000.0,
   "rope_scaling": {
     "factor": 32.0,
-    "high_freq_factor": 4.0,
-    "low_freq_factor": 1.0,
     "original_max_position_embeddings": 8192,
-    "rope_type": "llama3"
+    "rope_type": "yarn"
   },
   "attention_bias": false,
   "attention_dropout": 0.0,
-  "rope_type": "yarn",
   "d_model": 2048,
   "ssm_cfg": {
     "expand": 1,