Sam-2.0 / config.json
{
"model_type": "sam2",
"architectures": ["Sam2ForCausalLM"],
"vocab_size": 50262,
"d_model": 384,
"n_layers": 6,
"n_heads": 6,
"ff_mult": 4.0,
"dropout": 0.1,
"input_modality": "text",
"head_type": "causal_lm",
"version": "0.1",
"pad_token_id": 50256,
"bos_token_id": 50256,
"eos_token_id": 50256,
"torch_dtype": "float32",
"transformers_version": "4.44.0"
}
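
A minimal sketch of reading this config and deriving the quantities it implies. The file path, the assumption of tied input/output embeddings, and the standard decoder-only parameter formula (4·d² for the attention projections plus 2·d·ff for the MLP per layer) are assumptions, not something stated by the config or the Sam2ForCausalLM implementation itself.

```python
import json

# Load the raw config (path is assumed; point it at wherever config.json lives).
with open("config.json") as f:
    cfg = json.load(f)

d_model = cfg["d_model"]                  # 384
n_layers = cfg["n_layers"]                # 6
n_heads = cfg["n_heads"]                  # 6
ff_dim = int(d_model * cfg["ff_mult"])    # 384 * 4.0 = 1536
head_dim = d_model // n_heads             # 384 / 6 = 64
vocab = cfg["vocab_size"]                 # 50262

# Rough parameter estimate, assuming a standard decoder-only Transformer with
# tied embeddings; the actual Sam2ForCausalLM architecture may differ.
per_layer = 4 * d_model**2 + 2 * d_model * ff_dim
approx_params = n_layers * per_layer + vocab * d_model

print(f"head_dim={head_dim}, ff_dim={ff_dim}")
print(f"approx parameters: {approx_params / 1e6:.1f}M")
```

Under these assumptions the model works out to roughly 30M parameters, most of which sit in the 50262 × 384 token embedding; the pad, bos, and eos ids all being 50256 matches the GPT-2 tokenizer convention of reusing the end-of-text token.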