tainc committed on
Commit
a7080b3
·
verified ·
1 Parent(s): 6f60058

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -3
config.json CHANGED
@@ -1,5 +1,4 @@
1
  {
2
- "_name_or_path": "/mnt/fs-arf-01/tai/sft/llamafactory_sft/complete/simpo/lf_llama3_8b_CPTHRUN2_sftqlora_com0.3Wselfmagpiehigh_llama3tmp_simpoqlora_gemmaseafeedback",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
@@ -19,8 +18,6 @@
19
  "num_attention_heads": 32,
20
  "num_hidden_layers": 32,
21
  "num_key_value_heads": 8,
22
- "pretrained": true,
23
- "pretrained_model_name_or_path": "/mnt/fs-arf-01/cpt_checkpoints/meta-llama/Meta-Llama-3-8B",
24
  "pretraining_tp": 1,
25
  "rms_norm_eps": 1e-05,
26
  "rope_scaling": null,
 
1
  {
 
2
  "architectures": [
3
  "LlamaForCausalLM"
4
  ],
 
18
  "num_attention_heads": 32,
19
  "num_hidden_layers": 32,
20
  "num_key_value_heads": 8,
 
 
21
  "pretraining_tp": 1,
22
  "rms_norm_eps": 1e-05,
23
  "rope_scaling": null,