{
    "mlp_bias": false,
    "attn_bias": false,
    "rotary_base": 1000000.0,
    "rotary_scaling": null,
    "disable_weight_only_quant_plugin": false,
    "num_labels": 1,
    "use_logn_attn": false,
    "moe": {
        "num_experts": 64,
        "shared_expert_intermediate_size": 20480,
        "top_k": 8,
        "normalization_mode": 0,
        "sparse_mixer_epsilon": 0.01,
        "tp_mode": 0,
        "device_limited_n_group": 0,
        "device_limited_topk_group": 0,
        "device_limited_routed_scaling_factor": 1.0
    },
    "architecture": "Qwen2MoeForCausalLM",
    "dtype": "bfloat16",
    "vocab_size": 151936,
    "hidden_size": 3584,
    "num_hidden_layers": 28,
    "num_attention_heads": 28,
    "hidden_act": "swiglu",
    "logits_dtype": "float32",
    "norm_epsilon": 1e-06,
    "runtime_defaults": null,
    "position_embedding_type": "rope_gpt_neox",
    "num_key_value_heads": 4,
    "intermediate_size": 18944,
    "max_position_embeddings": 32768,
    "mapping": {
        "world_size": 2,
        "gpus_per_node": 8,
        "cp_size": 1,
        "tp_size": 2,
        "pp_size": 1,
        "moe_tp_size": 2,
        "moe_ep_size": 1,
        "auto_parallel": false
    },
    "quantization": {
        "quant_algo": "W4A16",
        "kv_cache_quant_algo": null,
        "group_size": 128,
        "smoothquant_val": 0.5,
        "clamp_val": null,
        "use_meta_recipe": false,
        "has_zero_point": false,
        "pre_quant_scale": false,
        "exclude_modules": null
    },
    "use_parallel_embedding": false,
    "embedding_sharding_dim": 0,
    "head_size": 128,
    "qk_layernorm": false,
    "rotary_embedding_dim": 128,
    "seq_length": 8192,
    "qwen_type": "qwen2_moe",
    "moe_intermediate_size": 2560,
    "moe_shared_expert_intermediate_size": 20480,
    "tie_word_embeddings": false
}
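Several fields in this config are derived from others, so a quick self-consistency check can catch a bad conversion before engine build. The sketch below is a minimal, stand-alone validation script, not part of any TensorRT-LLM API: the checkpoint path is an assumption, and the MoE-parallelism constraint (moe_tp_size * moe_ep_size == tp_size) is how the mapping is commonly factored, stated here as an assumption rather than a guarantee.

```python
import json

# Minimal sketch: load the converted checkpoint's config.json
# ("trt_ckpt/config.json" is a hypothetical path) and verify that
# the derived fields agree with each other.
with open("trt_ckpt/config.json") as f:
    cfg = json.load(f)

# head_size should equal hidden_size / num_attention_heads: 3584 / 28 = 128.
assert cfg["head_size"] == cfg["hidden_size"] // cfg["num_attention_heads"]

# GQA: the 28 query heads must divide evenly over the 4 KV heads.
assert cfg["num_attention_heads"] % cfg["num_key_value_heads"] == 0

# world_size must cover the full mapping: tp * pp * cp = 2 * 1 * 1 = 2.
m = cfg["mapping"]
assert m["world_size"] == m["tp_size"] * m["pp_size"] * m["cp_size"]

# Assumed constraint: MoE parallelism factors into the TP group,
# moe_tp_size * moe_ep_size == tp_size (2 * 1 == 2 here).
assert m["moe_tp_size"] * m["moe_ep_size"] == m["tp_size"]

print(f'{cfg["architecture"]} ({cfg["dtype"]}, '
      f'{cfg["quantization"]["quant_algo"]}) looks consistent')
```

With the values above, every assertion holds: 3584 / 28 = 128 matches head_size and rotary_embedding_dim, and the two-GPU mapping is fully accounted for by tensor parallelism.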