dendimaki committed
Commit 0489cae (verified) · 1 parent: dd8fa1f

Upload config

Files changed (1): config.json (+93 -0)
config.json ADDED
@@ -0,0 +1,93 @@
+ {
+   "_name_or_path": "dendimaki/bert-finetuned-combine",
+   "architectures": [
+     "BertForSequenceClassification"
+   ],
+   "attention_probs_dropout_prob": 0.1,
+   "classifier_dropout": null,
+   "gradient_checkpointing": false,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.1,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "loc1lay1",
+     "1": "loc1lay2",
+     "10": "loc3lay3",
+     "11": "loc3lay4",
+     "12": "loc4lay1",
+     "13": "loc4lay2",
+     "14": "loc4lay3",
+     "15": "loc4lay4",
+     "16": "loc5+",
+     "17": "loc1",
+     "18": "loc2",
+     "19": "loc3",
+     "2": "loc1lay3",
+     "20": "loc4",
+     "21": "loc5",
+     "22": "nfw",
+     "23": "tfw",
+     "24": "tfwc",
+     "25": "tfwp",
+     "26": "lay1",
+     "27": "fwb",
+     "28": "lay2",
+     "29": "lay3",
+     "3": "loc1lay4",
+     "30": "lay4",
+     "4": "loc2lay1",
+     "5": "loc2lay2",
+     "6": "loc2lay3",
+     "7": "loc2lay4",
+     "8": "loc3lay1",
+     "9": "loc3lay2"
+   },
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "fwb": 27,
+     "lay1": 26,
+     "lay2": 28,
+     "lay3": 29,
+     "lay4": 30,
+     "loc1": 17,
+     "loc1lay1": 0,
+     "loc1lay2": 1,
+     "loc1lay3": 2,
+     "loc1lay4": 3,
+     "loc2": 18,
+     "loc2lay1": 4,
+     "loc2lay2": 5,
+     "loc2lay3": 6,
+     "loc2lay4": 7,
+     "loc3": 19,
+     "loc3lay1": 8,
+     "loc3lay2": 9,
+     "loc3lay3": 10,
+     "loc3lay4": 11,
+     "loc4": 20,
+     "loc4lay1": 12,
+     "loc4lay2": 13,
+     "loc4lay3": 14,
+     "loc4lay4": 15,
+     "loc5": 21,
+     "loc5+": 16,
+     "nfw": 22,
+     "tfw": 23,
+     "tfwc": 24,
+     "tfwp": 25
+   },
+   "layer_norm_eps": 1e-12,
+   "max_position_embeddings": 512,
+   "model_type": "bert",
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "pad_token_id": 0,
+   "position_embedding_type": "absolute",
+   "problem_type": "single_label_classification",
+   "torch_dtype": "float32",
+   "transformers_version": "4.35.2",
+   "type_vocab_size": 2,
+   "use_cache": true,
+   "vocab_size": 30522
+ }
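
The config describes a BERT-base checkpoint (`BertForSequenceClassification`) with a 31-way single-label classification head, where `num_labels` is inferred from the `id2label` mapping. A minimal usage sketch follows, assuming the repo id in `_name_or_path` resolves on the Hub and that the repo also hosts model weights and tokenizer files, which this commit alone does not confirm:

```python
import torch
from transformers import AutoConfig, AutoModelForSequenceClassification, AutoTokenizer

# Repo id taken from "_name_or_path" in the config above (an assumption:
# this commit only uploads config.json, not weights or tokenizer files).
repo_id = "dendimaki/bert-finetuned-combine"

config = AutoConfig.from_pretrained(repo_id)
print(config.num_labels)    # 31, inferred from the id2label mapping
print(config.id2label[16])  # "loc5+" (string keys become ints after loading)

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForSequenceClassification.from_pretrained(repo_id)
model.eval()

# max_length matches "max_position_embeddings": 512 in the config
inputs = tokenizer("example input text", return_tensors="pt",
                   truncation=True, max_length=512)
with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 31)

predicted_id = logits.argmax(dim=-1).item()
print(config.id2label[predicted_id])
```

Since `problem_type` is `single_label_classification`, the head is trained with cross-entropy over the 31 classes, so `argmax` over the logits gives the predicted label id. The `transformers_version` field records that the config was saved with transformers 4.35.2; any reasonably recent version should load it.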