{ "architectures": ["BVVMaxMoeForCausalLM"], "auto_map": { "AutoConfig": "modeling_bvv_max_moe.BVVMaxConfig", "AutoModel": "modeling_bvv_max_moe.BVVMaxMoeForCausalLM", "AutoModelForCausalLM": "modeling_bvv_max_moe.BVVMaxMoeForCausalLM" }, "model_type": "bvv_max", "vocab_size": 131072, "block_size": 1024, "n_embd": 1024, "n_layer": 12, "n_head": 12, "pad_id": 57344, "bos_token": "", "eos_token": "", "unk_token": "", "pad_token": "", "torch_dtype": "float32" }