{ "_name_or_path": "data/data_subcategorizer/results_train", "architectures": [ "XLMRobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "\u041e\u0431\u0449\u0435\u043a\u043b\u0438\u043d\u0438\u0447\u0435\u0441\u043a\u0438\u0435", "1": "\u0411\u0438\u043e\u0445\u0438\u043c\u0438\u044f", "2": "\u0412\u0438\u0442\u0430\u043c\u0438\u043d\u044b", "3": "\u0418\u043d\u0444\u0435\u043a\u0446\u0438\u0438", "4": "\u0413\u0435\u043d\u0435\u0442\u0438\u043a\u0430", "5": "\u0410\u043b\u043b\u0435\u0440\u0433\u043e\u043b\u043e\u0433\u0438\u044f", "6": "\u041e\u043d\u043a\u043e\u043c\u0430\u0440\u043a\u0435\u0440\u044b", "7": "\u0418\u043c\u043c\u0443\u043d\u043e\u043b\u043e\u0433\u0438\u0447\u0435\u0441\u043a\u0438\u0435", "8": "\u0413\u043e\u0440\u043c\u043e\u043d\u044b", "9": "\u041c\u0438\u043a\u0440\u043e\u044d\u043b\u0435\u043c\u0435\u043d\u0442\u044b", "10": "\u0422\u043e\u043a\u0441\u0438\u043a\u043e\u043b\u043e\u0433\u0438\u044f" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "\u0410\u043b\u043b\u0435\u0440\u0433\u043e\u043b\u043e\u0433\u0438\u044f": 5, "\u0411\u0438\u043e\u0445\u0438\u043c\u0438\u044f": 1, "\u0412\u0438\u0442\u0430\u043c\u0438\u043d\u044b": 2, "\u0413\u0435\u043d\u0435\u0442\u0438\u043a\u0430": 4, "\u0413\u043e\u0440\u043c\u043e\u043d\u044b": 8, "\u0418\u043c\u043c\u0443\u043d\u043e\u043b\u043e\u0433\u0438\u0447\u0435\u0441\u043a\u0438\u0435": 7, "\u0418\u043d\u0444\u0435\u043a\u0446\u0438\u0438": 3, "\u041c\u0438\u043a\u0440\u043e\u044d\u043b\u0435\u043c\u0435\u043d\u0442\u044b": 9, "\u041e\u0431\u0449\u0435\u043a\u043b\u0438\u043d\u0438\u0447\u0435\u0441\u043a\u0438\u0435": 0, "\u041e\u043d\u043a\u043e\u043c\u0430\u0440\u043a\u0435\u0440\u044b": 6, "\u0422\u043e\u043a\u0441\u0438\u043a\u043e\u043b\u043e\u0433\u0438\u044f": 10 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 8194, "model_type": "xlm-roberta", "num_attention_heads": 16, "num_hidden_layers": 24, "output_past": true, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "torch_dtype": "float32", "transformers_version": "4.48.1", "type_vocab_size": 1, "use_cache": true, "vocab_size": 46166 }