{ "_attn_implementation_autoset": true, "architectures": [ "MiniCPMForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "auto_map": { "AutoConfig": "openbmb/BitCPM4-1B--configuration_minicpm.MiniCPMConfig", "AutoModel": "openbmb/BitCPM4-1B--modeling_minicpm.MiniCPMModel", "AutoModelForCausalLM": "openbmb/BitCPM4-1B--modeling_minicpm.MiniCPMForCausalLM", "AutoModelForSeq2SeqLM": "openbmb/BitCPM4-1B--modeling_minicpm.MiniCPMForCausalLM", "AutoModelForSequenceClassification": "openbmb/BitCPM4-1B--modeling_minicpm.MiniCPMForSequenceClassification" }, "bos_token_id": 1, "dim_model_base": 256, "eos_token_id": [ 2, 73440 ], "hidden_act": "silu", "hidden_size": 1536, "initializer_range": 0.1, "intermediate_size": 3840, "max_position_embeddings": 32768, "model_type": "minicpm", "mup_denominator": null, "num_attention_heads": 24, "num_hidden_layers": 52, "num_key_value_heads": 8, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": { "long_factor": [ 1.0004360675811768, 1.0668443441390991, 1.1631425619125366, 1.3025742769241333, 1.5040205717086792, 1.7941505908966064, 2.2101221084594727, 2.802666664123535, 3.6389970779418945, 4.804192543029785, 6.39855432510376, 8.527148246765137, 11.277542114257812, 14.684998512268066, 18.69317054748535, 23.13019371032715, 27.72362518310547, 32.1606559753418, 36.168827056884766, 39.57627868652344, 42.32667541503906, 44.45526885986328, 46.04962921142578, 47.21482849121094, 48.05115509033203, 48.64370346069336, 49.05967712402344, 49.34980392456055, 49.551246643066406, 49.69068145751953, 49.78697967529297, 49.85338592529297 ], "original_max_position_embeddings": 32768, "rope_type": "longrope", "short_factor": [ 1.0004360675811768, 1.0668443441390991, 1.1631425619125366, 1.3025742769241333, 1.5040205717086792, 1.7941505908966064, 2.2101221084594727, 2.802666664123535, 3.6389970779418945, 4.804192543029785, 6.39855432510376, 8.527148246765137, 11.277542114257812, 14.684998512268066, 18.69317054748535, 23.13019371032715, 27.72362518310547, 32.1606559753418, 36.168827056884766, 39.57627868652344, 42.32667541503906, 44.45526885986328, 46.04962921142578, 47.21482849121094, 48.05115509033203, 48.64370346069336, 49.05967712402344, 49.34980392456055, 49.551246643066406, 49.69068145751953, 49.78697967529297, 49.85338592529297 ] }, "rope_theta": 10000.0, "scale_depth": 1.4, "scale_emb": 12, "sparse_config": null, "torch_dtype": "bfloat16", "transformers_version": "4.51.3", "use_cache": true, "vocab_size": 73448 }