vedantjumle committed
Commit ac29ac7 · 1 parent: a1cff5a

Training in progress epoch 0

Files changed (4)
  1. README.md +7 -5
  2. config.json +198 -16
  3. tf_model.h5 +2 -2
  4. tokenizer_config.json +1 -1
README.md CHANGED
@@ -1,4 +1,6 @@
 ---
+license: apache-2.0
+base_model: bert-large-uncased
 tags:
 - generated_from_keras_callback
 model-index:
@@ -11,11 +13,11 @@ probably proofread and complete it, then remove this comment. -->
 
 # vedantjumle/indo-ml-final-test-bert
 
-This model was trained from scratch on an unknown dataset.
+This model is a fine-tuned version of [bert-large-uncased](https://huggingface.co/bert-large-uncased) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Train Loss: nan
-- Validation Loss: nan
-- Train Accuracy: 0.0067
+- Train Loss: 5.0224
+- Validation Loss: 4.8502
+- Train Accuracy: 0.0433
 - Epoch: 0
 
 ## Model description
@@ -42,7 +44,7 @@ The following hyperparameters were used during training:
 
 | Train Loss | Validation Loss | Train Accuracy | Epoch |
 |:----------:|:---------------:|:--------------:|:-----:|
-| nan | nan | 0.0067 | 0 |
+| 5.0224 | 4.8502 | 0.0433 | 0 |
 
 
 ### Framework versions
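The `generated_from_keras_callback` tag and the "Training in progress epoch 0" commit message match what transformers' Keras `PushToHubCallback` produces when it pushes a checkpoint at the end of each epoch. A minimal sketch of such a setup follows; it is not the author's actual script, and the dataset objects, learning rate, and epoch count are placeholders.

```python
# Hypothetical sketch of a Keras fine-tuning run that pushes per-epoch commits
# like this one via PushToHubCallback. Hyperparameters and datasets are placeholders.
import tensorflow as tf
from transformers import AutoTokenizer, TFAutoModelForSequenceClassification
from transformers.keras_callbacks import PushToHubCallback

# 150 labels, matching the new config.json in this commit
model = TFAutoModelForSequenceClassification.from_pretrained(
    "bert-large-uncased", num_labels=150
)
tokenizer = AutoTokenizer.from_pretrained("bert-large-uncased")

# With no loss passed, transformers TF models fall back to their internal loss.
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=2e-5),
              metrics=["accuracy"])

callback = PushToHubCallback(
    output_dir="./indo-ml-final-test-bert",
    tokenizer=tokenizer,
    hub_model_id="vedantjumle/indo-ml-final-test-bert",
)

# tf_train_set / tf_val_set would be tf.data.Dataset objects built from the
# (unknown) dataset referenced in the model card:
# model.fit(tf_train_set, validation_data=tf_val_set, epochs=3, callbacks=[callback])
```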
config.json CHANGED
@@ -1,13 +1,14 @@
 {
-  "_name_or_path": "./transformers-models/bert",
-  "activation": "gelu",
+  "_name_or_path": "bert-large-uncased",
   "architectures": [
-    "DistilBertForSequenceClassification"
+    "BertForSequenceClassification"
   ],
-  "attention_dropout": 0.1,
-  "dim": 768,
-  "dropout": 0.1,
-  "hidden_dim": 3072,
+  "attention_probs_dropout_prob": 0.1,
+  "classifier_dropout": null,
+  "gradient_checkpointing": false,
+  "hidden_act": "gelu",
+  "hidden_dropout_prob": 0.1,
+  "hidden_size": 1024,
   "id2label": {
     "0": "LABEL_0",
     "1": "LABEL_1",
@@ -68,17 +69,158 @@
     "56": "LABEL_56",
     "57": "LABEL_57",
     "58": "LABEL_58",
-    "59": "LABEL_59"
+    "59": "LABEL_59",
+    "60": "LABEL_60",
+    "61": "LABEL_61",
+    "62": "LABEL_62",
+    "63": "LABEL_63",
+    "64": "LABEL_64",
+    "65": "LABEL_65",
+    "66": "LABEL_66",
+    "67": "LABEL_67",
+    "68": "LABEL_68",
+    "69": "LABEL_69",
+    "70": "LABEL_70",
+    "71": "LABEL_71",
+    "72": "LABEL_72",
+    "73": "LABEL_73",
+    "74": "LABEL_74",
+    "75": "LABEL_75",
+    "76": "LABEL_76",
+    "77": "LABEL_77",
+    "78": "LABEL_78",
+    "79": "LABEL_79",
+    "80": "LABEL_80",
+    "81": "LABEL_81",
+    "82": "LABEL_82",
+    "83": "LABEL_83",
+    "84": "LABEL_84",
+    "85": "LABEL_85",
+    "86": "LABEL_86",
+    "87": "LABEL_87",
+    "88": "LABEL_88",
+    "89": "LABEL_89",
+    "90": "LABEL_90",
+    "91": "LABEL_91",
+    "92": "LABEL_92",
+    "93": "LABEL_93",
+    "94": "LABEL_94",
+    "95": "LABEL_95",
+    "96": "LABEL_96",
+    "97": "LABEL_97",
+    "98": "LABEL_98",
+    "99": "LABEL_99",
+    "100": "LABEL_100",
+    "101": "LABEL_101",
+    "102": "LABEL_102",
+    "103": "LABEL_103",
+    "104": "LABEL_104",
+    "105": "LABEL_105",
+    "106": "LABEL_106",
+    "107": "LABEL_107",
+    "108": "LABEL_108",
+    "109": "LABEL_109",
+    "110": "LABEL_110",
+    "111": "LABEL_111",
+    "112": "LABEL_112",
+    "113": "LABEL_113",
+    "114": "LABEL_114",
+    "115": "LABEL_115",
+    "116": "LABEL_116",
+    "117": "LABEL_117",
+    "118": "LABEL_118",
+    "119": "LABEL_119",
+    "120": "LABEL_120",
+    "121": "LABEL_121",
+    "122": "LABEL_122",
+    "123": "LABEL_123",
+    "124": "LABEL_124",
+    "125": "LABEL_125",
+    "126": "LABEL_126",
+    "127": "LABEL_127",
+    "128": "LABEL_128",
+    "129": "LABEL_129",
+    "130": "LABEL_130",
+    "131": "LABEL_131",
+    "132": "LABEL_132",
+    "133": "LABEL_133",
+    "134": "LABEL_134",
+    "135": "LABEL_135",
+    "136": "LABEL_136",
+    "137": "LABEL_137",
+    "138": "LABEL_138",
+    "139": "LABEL_139",
+    "140": "LABEL_140",
+    "141": "LABEL_141",
+    "142": "LABEL_142",
+    "143": "LABEL_143",
+    "144": "LABEL_144",
+    "145": "LABEL_145",
+    "146": "LABEL_146",
+    "147": "LABEL_147",
+    "148": "LABEL_148",
+    "149": "LABEL_149"
   },
   "initializer_range": 0.02,
+  "intermediate_size": 4096,
   "label2id": {
     "LABEL_0": 0,
     "LABEL_1": 1,
     "LABEL_10": 10,
+    "LABEL_100": 100,
+    "LABEL_101": 101,
+    "LABEL_102": 102,
+    "LABEL_103": 103,
+    "LABEL_104": 104,
+    "LABEL_105": 105,
+    "LABEL_106": 106,
+    "LABEL_107": 107,
+    "LABEL_108": 108,
+    "LABEL_109": 109,
     "LABEL_11": 11,
+    "LABEL_110": 110,
+    "LABEL_111": 111,
+    "LABEL_112": 112,
+    "LABEL_113": 113,
+    "LABEL_114": 114,
+    "LABEL_115": 115,
+    "LABEL_116": 116,
+    "LABEL_117": 117,
+    "LABEL_118": 118,
+    "LABEL_119": 119,
     "LABEL_12": 12,
+    "LABEL_120": 120,
+    "LABEL_121": 121,
+    "LABEL_122": 122,
+    "LABEL_123": 123,
+    "LABEL_124": 124,
+    "LABEL_125": 125,
+    "LABEL_126": 126,
+    "LABEL_127": 127,
+    "LABEL_128": 128,
+    "LABEL_129": 129,
     "LABEL_13": 13,
+    "LABEL_130": 130,
+    "LABEL_131": 131,
+    "LABEL_132": 132,
+    "LABEL_133": 133,
+    "LABEL_134": 134,
+    "LABEL_135": 135,
+    "LABEL_136": 136,
+    "LABEL_137": 137,
+    "LABEL_138": 138,
+    "LABEL_139": 139,
     "LABEL_14": 14,
+    "LABEL_140": 140,
+    "LABEL_141": 141,
+    "LABEL_142": 142,
+    "LABEL_143": 143,
+    "LABEL_144": 144,
+    "LABEL_145": 145,
+    "LABEL_146": 146,
+    "LABEL_147": 147,
+    "LABEL_148": 148,
+    "LABEL_149": 149,
     "LABEL_15": 15,
     "LABEL_16": 16,
     "LABEL_17": 17,
@@ -129,19 +271,59 @@
     "LABEL_58": 58,
     "LABEL_59": 59,
     "LABEL_6": 6,
+    "LABEL_60": 60,
+    "LABEL_61": 61,
+    "LABEL_62": 62,
+    "LABEL_63": 63,
+    "LABEL_64": 64,
+    "LABEL_65": 65,
+    "LABEL_66": 66,
+    "LABEL_67": 67,
+    "LABEL_68": 68,
+    "LABEL_69": 69,
     "LABEL_7": 7,
+    "LABEL_70": 70,
+    "LABEL_71": 71,
+    "LABEL_72": 72,
+    "LABEL_73": 73,
+    "LABEL_74": 74,
+    "LABEL_75": 75,
+    "LABEL_76": 76,
+    "LABEL_77": 77,
+    "LABEL_78": 78,
+    "LABEL_79": 79,
     "LABEL_8": 8,
-    "LABEL_9": 9
+    "LABEL_80": 80,
+    "LABEL_81": 81,
+    "LABEL_82": 82,
+    "LABEL_83": 83,
+    "LABEL_84": 84,
+    "LABEL_85": 85,
+    "LABEL_86": 86,
+    "LABEL_87": 87,
+    "LABEL_88": 88,
+    "LABEL_89": 89,
+    "LABEL_9": 9,
+    "LABEL_90": 90,
+    "LABEL_91": 91,
+    "LABEL_92": 92,
+    "LABEL_93": 93,
+    "LABEL_94": 94,
+    "LABEL_95": 95,
+    "LABEL_96": 96,
+    "LABEL_97": 97,
+    "LABEL_98": 98,
+    "LABEL_99": 99
   },
+  "layer_norm_eps": 1e-12,
   "max_position_embeddings": 512,
-  "model_type": "distilbert",
-  "n_heads": 12,
-  "n_layers": 6,
+  "model_type": "bert",
+  "num_attention_heads": 16,
+  "num_hidden_layers": 24,
   "pad_token_id": 0,
-  "qa_dropout": 0.1,
-  "seq_classif_dropout": 0.2,
-  "sinusoidal_pos_embds": false,
-  "tie_weights_": true,
+  "position_embedding_type": "absolute",
   "transformers_version": "4.34.0",
+  "type_vocab_size": 2,
+  "use_cache": true,
   "vocab_size": 30522
 }
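This config change replaces the previous DistilBERT settings with a bert-large-uncased configuration and expands the label maps from 60 to 150 classes. A hedged sketch of how an equivalent config could be built and sanity-checked; the generic `LABEL_N` names come from the diff, and the real task's label names are unknown.

```python
# Sketch: reconstruct a config like the one in this commit and check a few
# bert-large dimensions. The LABEL_N names mirror the generic names in the diff.
from transformers import BertConfig

num_labels = 150
config = BertConfig.from_pretrained(
    "bert-large-uncased",
    num_labels=num_labels,
    id2label={i: f"LABEL_{i}" for i in range(num_labels)},
    label2id={f"LABEL_{i}": i for i in range(num_labels)},
)

assert config.hidden_size == 1024        # bert-large width
assert config.num_hidden_layers == 24    # bert-large depth
assert config.num_attention_heads == 16
assert config.intermediate_size == 4096
```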
tf_model.h5 CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:59b3e74682b8f00f1aa2e89858ca7463b5d66f310208e3e49d63bc301c3915a4
-size 268129984
+oid sha256:4ed0d8dff721ec71598c4cf1620f433b25cf9ef394d36d123909835299cb11e3
+size 1341734528
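The jump in checkpoint size is consistent with moving from a DistilBERT-base-sized model to BERT-large stored as float32 (4 bytes per parameter). A rough back-of-the-envelope check, with approximate parameter counts:

```python
# Rough size check: float32 weights take 4 bytes per parameter, so the h5 sizes
# in this diff line up with the approximate parameter counts of the two models.
distilbert_base_params = 67_000_000   # ~67M incl. a small classification head (approximate)
bert_large_params = 335_000_000       # ~335M for BERT-large (approximate)

print(distilbert_base_params * 4)  # ~268,000,000 bytes, close to the old size 268129984
print(bert_large_params * 4)       # ~1,340,000,000 bytes, close to the new size 1341734528
```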
tokenizer_config.json CHANGED
@@ -51,6 +51,6 @@
   "sep_token": "[SEP]",
   "strip_accents": null,
   "tokenize_chinese_chars": true,
-  "tokenizer_class": "DistilBertTokenizer",
+  "tokenizer_class": "BertTokenizer",
   "unk_token": "[UNK]"
 }
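`AutoTokenizer` resolves the class to load from this `tokenizer_class` field, so after this commit the repo loads with the BERT tokenizer classes rather than the DistilBERT ones; both use the same 30522-entry WordPiece vocabulary. A small usage sketch:

```python
# After this commit, AutoTokenizer picks the BERT tokenizer for this repo
# based on the tokenizer_class field in tokenizer_config.json.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("vedantjumle/indo-ml-final-test-bert")
print(type(tok).__name__)  # a BERT tokenizer class, e.g. "BertTokenizerFast"
print(tok.vocab_size)      # 30522, matching vocab_size in config.json
```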