Dolboebina committed
Commit cd3e5dc · verified · 1 Parent(s): f3a0f53

Upload original/config.json with huggingface_hub
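The commit message indicates the file was pushed with the huggingface_hub client. A minimal sketch of such an upload, using huggingface_hub's upload_file API; the repo id and local path are placeholders, not values taken from this commit:

# Minimal sketch, assuming an upload via huggingface_hub's upload_file API.
# The repo_id and local path below are hypothetical placeholders.
from huggingface_hub import HfApi

api = HfApi()  # picks up the token from `huggingface-cli login` by default
api.upload_file(
    path_or_fileobj="original/config.json",   # local file to push
    path_in_repo="original/config.json",      # destination path inside the repo
    repo_id="your-username/your-model",       # hypothetical repo id
    repo_type="model",
    commit_message="Upload original/config.json with huggingface_hub",
)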

Files changed (1)
  1. original/config.json +1 -0
original/config.json ADDED
@@ -0,0 +1 @@
+ {"num_hidden_layers": 24, "num_experts": 32, "experts_per_token": 4, "vocab_size": 201088, "hidden_size": 2880, "intermediate_size": 2880, "swiglu_limit": 7.0, "head_dim": 64, "num_attention_heads": 64, "num_key_value_heads": 8, "sliding_window": 128, "initial_context_length": 4096, "rope_theta": 150000, "rope_scaling_factor": 32.0, "rope_ntk_alpha": 1, "rope_ntk_beta": 32}