{ "_name_or_path": "/notebooks/plamo-2-1b-gorilla-chat2", "architectures": [ "PlamoForCausalLM" ], "attention_window_size": 2048, "auto_map": { "AutoConfig": "modeling_plamo.PlamoConfig", "AutoModelForCausalLM": "pfnet/plamo-2-1b--modeling_plamo.PlamoForCausalLM" }, "bos_token_id": 1, "capacity_factor": 1.0, "eos_token_id": 2, "eval_attention_n_bit": null, "eval_mlp_n_bit": null, "expert_dropout": 0.0, "fp8_accum_dtype": "bfloat16", "full_attention_idx": [], "group_size": 1024, "hidden_size": 2048, "hidden_size_per_head": 128, "image_feature_size": null, "image_proj_type": "linear", "image_token_id": null, "intermediate_size": 8192, "k_expert": null, "linear_type": "fp8", "mamba_chunk_size": 256, "mamba_d_conv": 4, "mamba_d_state": 64, "mamba_enabled": true, "mamba_num_heads": 32, "mamba_step": 2, "max_position_embeddings": 10485760, "model_type": "plamo", "n_expert": null, "num_attention_heads": 16, "num_hidden_layers": 16, "num_key_value_heads": 1, "rms_norm_eps": 1e-06, "shared_intermediate_size": null, "sliding_window": 2048, "sparse_intermediate_size": null, "sparse_step": null, "tokenizer_class": "PlamoTokenizer", "torch_dtype": "bfloat16", "transformers_version": "4.49.0", "use_cache": false, "use_predefined_initial_state": false, "vocab_size": 100000 }