{ "architectures": [ "EuroBertForTokenClassification" ], "attention_bias": false, "attention_dropout": 0.0, "auto_map": { "AutoConfig": "EuroBERT/EuroBERT-610m--configuration_eurobert.EuroBertConfig", "AutoModel": "EuroBERT/EuroBERT-610m--modeling_eurobert.EuroBertModel", "AutoModelForMaskedLM": "EuroBERT/EuroBERT-610m--modeling_eurobert.EuroBertForMaskedLM", "AutoModelForPreTraining": "EuroBERT/EuroBERT-610m--modeling_eurobert.EuroBertPreTrainedModel", "AutoModelForSequenceClassification": "EuroBERT/EuroBERT-610m--modeling_eurobert.EuroBertForSequenceClassification", "AutoModelForTokenClassification": "EuroBERT/EuroBERT-610m--modeling_eurobert.EuroBertForTokenClassification" }, "bos_token": "<|begin_of_text|>", "bos_token_id": 128000, "clf_pooling": "late", "eos_token": "<|end_of_text|>", "eos_token_id": 128001, "head_dim": 64, "hidden_act": "silu", "hidden_dropout": 0.0, "hidden_size": 1152, "id2label": { "0": "O", "1": "I-social group", "2": "I-political group", "3": "I-political institution", "4": "I-organization, public institution, or collective actor", "5": "I-implicit social group reference", "6": "B-social group", "7": "B-political group", "8": "B-political institution", "9": "B-organization, public institution, or collective actor", "10": "B-implicit social group reference" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "B-implicit social group reference": 10, "B-organization, public institution, or collective actor": 9, "B-political group": 7, "B-political institution": 8, "B-social group": 6, "I-implicit social group reference": 5, "I-organization, public institution, or collective actor": 4, "I-political group": 2, "I-political institution": 3, "I-social group": 1, "O": 0 }, "mask_token": "<|mask|>", "mask_token_id": 128002, "max_position_embeddings": 8192, "mlp_bias": false, "model_type": "eurobert", "num_attention_heads": 18, "num_hidden_layers": 26, "num_key_value_heads": 6, "pad_token": "<|end_of_text|>", "pad_token_id": 128001, "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 250000, "tie_word_embeddings": false, "torch_dtype": "float32", "transformers_version": "4.51.3", "use_cache": false, "vocab_size": 128256 }