{
  "model_type": "sam2",
  "architectures": ["Sam2ForCausalLM"],
  "vocab_size": 50262,
  "d_model": 384,
  "n_layers": 6,
  "n_heads": 6,
  "ff_mult": 4.0,
  "dropout": 0.1,
  "input_modality": "text",
  "head_type": "causal_lm",
  "version": "0.1",
  "pad_token_id": 50256,
  "bos_token_id": 50256,
  "eos_token_id": 50256,
  "torch_dtype": "float32",
  "transformers_version": "4.44.0"
}