sharkMeow committed
Commit 53dfb60 · verified · 1 parent: bdbc1c3

End of training

all_results.json ADDED
@@ -0,0 +1,8 @@
+{
+  "epoch": 99.91201764057332,
+  "total_flos": 5.364383961180322e+18,
+  "train_loss": 1.6900030501417007,
+  "train_runtime": 96835.149,
+  "train_samples_per_second": 140.49,
+  "train_steps_per_second": 1.17
+}
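
The aggregates above are internally consistent and can be sanity-checked from the runtime alone, e.g. 96,835 s × 1.17 steps/s ≈ 113,300 optimizer steps. A minimal check in plain Python (assumes the file sits in the working directory):

```python
import json

# Aggregate training results added in this commit.
with open("all_results.json") as f:
    results = json.load(f)

runtime = results["train_runtime"]  # seconds (~26.9 h)
# 96,835 s * 1.17 steps/s  ->  ~113,300 optimizer steps
print(f"approx. steps:   {runtime * results['train_steps_per_second']:,.0f}")
# 96,835 s * 140.49 samples/s  ->  ~13.6 M samples over ~100 epochs
print(f"approx. samples: {runtime * results['train_samples_per_second']:,.0f}")
print(f"runtime:         {runtime / 3600:.1f} h")
```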
preprocessor_config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "crop_size": {
+    "height": 224,
+    "width": 224
+  },
+  "do_center_crop": false,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "ChineseCLIPImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "height": 224,
+    "width": 224
+  }
+}
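
The image side pins the stock CLIP normalization statistics and a fixed 224×224 input, with center cropping disabled (`resample: 3` is PIL's bicubic filter, and `rescale_factor` is 1/255). A minimal loading sketch; the repo id `sharkMeow/<repo-name>` is a hypothetical placeholder:

```python
from PIL import Image
from transformers import AutoImageProcessor

# AutoImageProcessor resolves preprocessor_config.json to ChineseCLIPImageProcessor.
processor = AutoImageProcessor.from_pretrained("sharkMeow/<repo-name>")  # hypothetical id

image = Image.open("example.jpg").convert("RGB")  # any local test image
inputs = processor(images=image, return_tensors="pt")
print(inputs["pixel_values"].shape)  # torch.Size([1, 3, 224, 224])
```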
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+{
+  "cls_token": "[CLS]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": "[UNK]"
+}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
+{
+  "added_tokens_decoder": {
+    "0": {
+      "content": "[PAD]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "100": {
+      "content": "[UNK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "101": {
+      "content": "[CLS]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "102": {
+      "content": "[SEP]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "103": {
+      "content": "[MASK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "clean_up_tokenization_spaces": true,
+  "cls_token": "[CLS]",
+  "do_basic_tokenize": true,
+  "do_lower_case": true,
+  "extra_special_tokens": {},
+  "mask_token": "[MASK]",
+  "model_max_length": 1000000000000000019884624838656,
+  "never_split": null,
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "BertTokenizer",
+  "unk_token": "[UNK]"
+}
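
The text side is a stock `BertTokenizer` with Chinese-character tokenization and lowercasing enabled; ids 0/100/101/102/103 are the standard BERT special-token slots, and the huge `model_max_length` is the Transformers sentinel for "no limit recorded", so pass an explicit `max_length` at call time. A minimal sketch with the same hypothetical repo id:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("sharkMeow/<repo-name>")  # hypothetical id

# [CLS] ... [SEP] are added automatically, per special_tokens_map.json.
enc = tokenizer("一只在草地上奔跑的狗", truncation=True, max_length=52,
                padding="max_length", return_tensors="pt")
print(enc["input_ids"].shape)
print(tokenizer.convert_ids_to_tokens(enc["input_ids"][0, :6].tolist()))
```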
train_results.json ADDED
@@ -0,0 +1,8 @@
+{
+  "epoch": 99.91201764057332,
+  "total_flos": 5.364383961180322e+18,
+  "train_loss": 1.6900030501417007,
+  "train_runtime": 96835.149,
+  "train_samples_per_second": 140.49,
+  "train_steps_per_second": 1.17
+}
trainer_state.json ADDED
@@ -0,0 +1,743 @@
+{
+  "best_global_step": null,
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 99.91201764057332,
+  "eval_steps": 500,
+  "global_step": 113300,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.9993384785005512,
+      "grad_norm": 6.264042854309082,
+      "learning_rate": 9.90035304501324e-06,
+      "loss": 2.1264,
+      "step": 1133
+    },
+    {
+      "epoch": 1.9984564498346198,
+      "grad_norm": 8.962105751037598,
+      "learning_rate": 9.80035304501324e-06,
+      "loss": 2.0771,
+      "step": 2266
+    },
+    {
+      "epoch": 2.997574421168688,
+      "grad_norm": 13.9079008102417,
+      "learning_rate": 9.70044130626655e-06,
+      "loss": 2.0072,
+      "step": 3399
+    },
+    {
+      "epoch": 3.9966923925027564,
+      "grad_norm": 9.212200164794922,
+      "learning_rate": 9.60044130626655e-06,
+      "loss": 1.9554,
+      "step": 4532
+    },
+    {
+      "epoch": 4.995810363836824,
+      "grad_norm": 9.967486381530762,
+      "learning_rate": 9.50052956751986e-06,
+      "loss": 1.9156,
+      "step": 5665
+    },
+    {
+      "epoch": 5.9949283351708935,
+      "grad_norm": 9.588685989379883,
+      "learning_rate": 9.40061782877317e-06,
+      "loss": 1.9031,
+      "step": 6798
+    },
+    {
+      "epoch": 6.994046306504961,
+      "grad_norm": 5.2934889793396,
+      "learning_rate": 9.30061782877317e-06,
+      "loss": 1.8739,
+      "step": 7931
+    },
+    {
+      "epoch": 7.99316427783903,
+      "grad_norm": 12.791667938232422,
+      "learning_rate": 9.20070609002648e-06,
+      "loss": 1.8476,
+      "step": 9064
+    },
+    {
+      "epoch": 8.992282249173098,
+      "grad_norm": 5.592958927154541,
+      "learning_rate": 9.100706090026479e-06,
+      "loss": 1.8378,
+      "step": 10197
+    },
+    {
+      "epoch": 9.991400220507167,
+      "grad_norm": 6.236687660217285,
+      "learning_rate": 9.000706090026478e-06,
+      "loss": 1.8141,
+      "step": 11330
+    },
+    {
+      "epoch": 10.990518191841234,
+      "grad_norm": 14.459975242614746,
+      "learning_rate": 8.90079435127979e-06,
+      "loss": 1.8008,
+      "step": 12463
+    },
+    {
+      "epoch": 11.989636163175303,
+      "grad_norm": 7.29625940322876,
+      "learning_rate": 8.800794351279789e-06,
+      "loss": 1.7965,
+      "step": 13596
+    },
+    {
+      "epoch": 12.98875413450937,
+      "grad_norm": 7.927811622619629,
+      "learning_rate": 8.7008826125331e-06,
+      "loss": 1.7768,
+      "step": 14729
+    },
+    {
+      "epoch": 13.98787210584344,
+      "grad_norm": 7.546788692474365,
+      "learning_rate": 8.600882612533099e-06,
+      "loss": 1.7735,
+      "step": 15862
+    },
+    {
+      "epoch": 14.986990077177508,
+      "grad_norm": 2.396638870239258,
+      "learning_rate": 8.500970873786408e-06,
+      "loss": 1.7688,
+      "step": 16995
+    },
+    {
+      "epoch": 15.986108048511577,
+      "grad_norm": 4.344698905944824,
+      "learning_rate": 8.400970873786408e-06,
+      "loss": 1.7521,
+      "step": 18128
+    },
+    {
+      "epoch": 16.985226019845644,
+      "grad_norm": 3.3172178268432617,
+      "learning_rate": 8.300970873786409e-06,
+      "loss": 1.7524,
+      "step": 19261
+    },
+    {
+      "epoch": 17.984343991179713,
+      "grad_norm": 4.233256816864014,
+      "learning_rate": 8.200970873786408e-06,
+      "loss": 1.7394,
+      "step": 20394
+    },
+    {
+      "epoch": 18.983461962513783,
+      "grad_norm": 3.924440860748291,
+      "learning_rate": 8.101147396293027e-06,
+      "loss": 1.7355,
+      "step": 21527
+    },
+    {
+      "epoch": 19.982579933847852,
+      "grad_norm": 8.452503204345703,
+      "learning_rate": 8.001147396293027e-06,
+      "loss": 1.7245,
+      "step": 22660
+    },
+    {
+      "epoch": 20.981697905181917,
+      "grad_norm": 10.714750289916992,
+      "learning_rate": 7.901147396293028e-06,
+      "loss": 1.7205,
+      "step": 23793
+    },
+    {
+      "epoch": 21.980815876515987,
+      "grad_norm": 12.158623695373535,
+      "learning_rate": 7.801147396293029e-06,
+      "loss": 1.7207,
+      "step": 24926
+    },
+    {
+      "epoch": 22.979933847850056,
+      "grad_norm": 2.1460916996002197,
+      "learning_rate": 7.701323918799648e-06,
+      "loss": 1.716,
+      "step": 26059
+    },
+    {
+      "epoch": 23.979051819184125,
+      "grad_norm": 2.6795060634613037,
+      "learning_rate": 7.601323918799648e-06,
+      "loss": 1.71,
+      "step": 27192
+    },
+    {
+      "epoch": 24.97816979051819,
+      "grad_norm": 4.405874729156494,
+      "learning_rate": 7.501412180052957e-06,
+      "loss": 1.7143,
+      "step": 28325
+    },
+    {
+      "epoch": 25.97728776185226,
+      "grad_norm": 2.3188281059265137,
+      "learning_rate": 7.401412180052957e-06,
+      "loss": 1.6879,
+      "step": 29458
+    },
+    {
+      "epoch": 26.97640573318633,
+      "grad_norm": 4.247509002685547,
+      "learning_rate": 7.301412180052957e-06,
+      "loss": 1.7008,
+      "step": 30591
+    },
+    {
+      "epoch": 27.9755237045204,
+      "grad_norm": 5.176025390625,
+      "learning_rate": 7.2015004413062674e-06,
+      "loss": 1.6904,
+      "step": 31724
+    },
+    {
+      "epoch": 28.974641675854464,
+      "grad_norm": 3.4096691608428955,
+      "learning_rate": 7.1015004413062675e-06,
+      "loss": 1.6884,
+      "step": 32857
+    },
+    {
+      "epoch": 29.973759647188533,
+      "grad_norm": 5.346144676208496,
+      "learning_rate": 7.001588702559578e-06,
+      "loss": 1.6874,
+      "step": 33990
+    },
+    {
+      "epoch": 30.972877618522602,
+      "grad_norm": 4.809396743774414,
+      "learning_rate": 6.901588702559577e-06,
+      "loss": 1.6834,
+      "step": 35123
+    },
+    {
+      "epoch": 31.97199558985667,
+      "grad_norm": 4.950044631958008,
+      "learning_rate": 6.8016769638128865e-06,
+      "loss": 1.6704,
+      "step": 36256
+    },
+    {
+      "epoch": 32.97111356119074,
+      "grad_norm": 2.259593963623047,
+      "learning_rate": 6.701676963812887e-06,
+      "loss": 1.676,
+      "step": 37389
+    },
+    {
+      "epoch": 33.97023153252481,
+      "grad_norm": 1.9857131242752075,
+      "learning_rate": 6.601765225066197e-06,
+      "loss": 1.6721,
+      "step": 38522
+    },
+    {
+      "epoch": 34.96934950385887,
+      "grad_norm": 2.6523988246917725,
+      "learning_rate": 6.501765225066197e-06,
+      "loss": 1.6519,
+      "step": 39655
+    },
+    {
+      "epoch": 35.968467475192945,
+      "grad_norm": 2.5178589820861816,
+      "learning_rate": 6.401853486319506e-06,
+      "loss": 1.6763,
+      "step": 40788
+    },
+    {
+      "epoch": 36.96758544652701,
+      "grad_norm": 3.966003894805908,
+      "learning_rate": 6.301941747572816e-06,
+      "loss": 1.6729,
+      "step": 41921
+    },
+    {
+      "epoch": 37.96670341786108,
+      "grad_norm": 2.874756336212158,
+      "learning_rate": 6.201941747572816e-06,
+      "loss": 1.6761,
+      "step": 43054
+    },
+    {
+      "epoch": 38.96582138919515,
+      "grad_norm": 5.206564426422119,
+      "learning_rate": 6.102030008826125e-06,
+      "loss": 1.6731,
+      "step": 44187
+    },
+    {
+      "epoch": 39.964939360529215,
+      "grad_norm": 3.3622446060180664,
+      "learning_rate": 6.002030008826126e-06,
+      "loss": 1.6649,
+      "step": 45320
+    },
+    {
+      "epoch": 40.96405733186329,
+      "grad_norm": 2.243980884552002,
+      "learning_rate": 5.902118270079436e-06,
+      "loss": 1.6617,
+      "step": 46453
+    },
+    {
+      "epoch": 41.96317530319735,
+      "grad_norm": 2.656219720840454,
+      "learning_rate": 5.802118270079435e-06,
+      "loss": 1.6539,
+      "step": 47586
+    },
+    {
+      "epoch": 42.96229327453142,
+      "grad_norm": 2.1969571113586426,
+      "learning_rate": 5.702206531332745e-06,
+      "loss": 1.6507,
+      "step": 48719
+    },
+    {
+      "epoch": 43.96141124586549,
+      "grad_norm": 2.570960760116577,
+      "learning_rate": 5.602294792586055e-06,
+      "loss": 1.6517,
+      "step": 49852
+    },
+    {
+      "epoch": 44.96052921719956,
+      "grad_norm": 2.2769925594329834,
+      "learning_rate": 5.502294792586055e-06,
+      "loss": 1.6622,
+      "step": 50985
+    },
+    {
+      "epoch": 45.95964718853363,
+      "grad_norm": 1.3121511936187744,
+      "learning_rate": 5.402383053839365e-06,
+      "loss": 1.6583,
+      "step": 52118
+    },
+    {
+      "epoch": 46.958765159867696,
+      "grad_norm": 2.186378002166748,
+      "learning_rate": 5.302383053839365e-06,
+      "loss": 1.6579,
+      "step": 53251
+    },
+    {
+      "epoch": 47.95788313120176,
+      "grad_norm": 1.257639765739441,
+      "learning_rate": 5.202471315092674e-06,
+      "loss": 1.653,
+      "step": 54384
+    },
+    {
+      "epoch": 48.957001102535834,
+      "grad_norm": 2.570171594619751,
+      "learning_rate": 5.102471315092676e-06,
+      "loss": 1.6503,
+      "step": 55517
+    },
+    {
+      "epoch": 49.9561190738699,
+      "grad_norm": 1.8046927452087402,
+      "learning_rate": 5.002559576345984e-06,
+      "loss": 1.6569,
+      "step": 56650
+    },
+    {
+      "epoch": 50.95523704520397,
+      "grad_norm": 2.055699348449707,
+      "learning_rate": 4.9026478375992945e-06,
+      "loss": 1.649,
+      "step": 57783
+    },
+    {
+      "epoch": 51.95435501653804,
+      "grad_norm": 1.5541220903396606,
+      "learning_rate": 4.802647837599294e-06,
+      "loss": 1.6463,
+      "step": 58916
+    },
+    {
+      "epoch": 52.953472987872104,
+      "grad_norm": 1.3364524841308594,
+      "learning_rate": 4.702736098852604e-06,
+      "loss": 1.6406,
+      "step": 60049
+    },
+    {
+      "epoch": 53.95259095920618,
+      "grad_norm": 1.8428481817245483,
+      "learning_rate": 4.602736098852604e-06,
+      "loss": 1.6443,
+      "step": 61182
+    },
+    {
+      "epoch": 54.95170893054024,
+      "grad_norm": 1.3728575706481934,
+      "learning_rate": 4.5028243601059144e-06,
+      "loss": 1.6427,
+      "step": 62315
+    },
+    {
+      "epoch": 55.95082690187431,
+      "grad_norm": 2.039762496948242,
+      "learning_rate": 4.402824360105914e-06,
+      "loss": 1.6523,
+      "step": 63448
+    },
+    {
+      "epoch": 56.94994487320838,
+      "grad_norm": 0.4440517723560333,
+      "learning_rate": 4.302912621359224e-06,
+      "loss": 1.6346,
+      "step": 64581
+    },
+    {
+      "epoch": 57.949062844542446,
+      "grad_norm": 2.8719191551208496,
+      "learning_rate": 4.202912621359223e-06,
+      "loss": 1.6481,
+      "step": 65714
+    },
+    {
+      "epoch": 58.94818081587652,
+      "grad_norm": 2.116090774536133,
+      "learning_rate": 4.1030008826125335e-06,
+      "loss": 1.6403,
+      "step": 66847
+    },
+    {
+      "epoch": 59.947298787210585,
+      "grad_norm": 2.5801730155944824,
+      "learning_rate": 4.003089143865843e-06,
+      "loss": 1.6445,
+      "step": 67980
+    },
+    {
+      "epoch": 60.94641675854465,
+      "grad_norm": 1.5309211015701294,
+      "learning_rate": 3.903089143865843e-06,
+      "loss": 1.6495,
+      "step": 69113
+    },
+    {
+      "epoch": 61.94553472987872,
+      "grad_norm": 2.1777145862579346,
+      "learning_rate": 3.803177405119153e-06,
+      "loss": 1.6477,
+      "step": 70246
+    },
+    {
+      "epoch": 62.94465270121279,
+      "grad_norm": 1.003503441810608,
+      "learning_rate": 3.7031774051191535e-06,
+      "loss": 1.6387,
+      "step": 71379
+    },
+    {
+      "epoch": 63.943770672546854,
+      "grad_norm": 2.015745162963867,
+      "learning_rate": 3.603265666372463e-06,
+      "loss": 1.6307,
+      "step": 72512
+    },
+    {
+      "epoch": 64.94288864388092,
+      "grad_norm": 2.482320547103882,
+      "learning_rate": 3.503265666372463e-06,
+      "loss": 1.6402,
+      "step": 73645
+    },
+    {
+      "epoch": 65.94200661521499,
+      "grad_norm": 2.404247999191284,
+      "learning_rate": 3.403353927625773e-06,
+      "loss": 1.6409,
+      "step": 74778
+    },
+    {
+      "epoch": 66.94112458654907,
+      "grad_norm": 1.197210669517517,
+      "learning_rate": 3.3034421888790823e-06,
+      "loss": 1.6518,
+      "step": 75911
+    },
+    {
+      "epoch": 67.94024255788312,
+      "grad_norm": 0.6208453178405762,
+      "learning_rate": 3.2034421888790824e-06,
+      "loss": 1.6461,
+      "step": 77044
+    },
+    {
+      "epoch": 68.9393605292172,
+      "grad_norm": 1.967084527015686,
+      "learning_rate": 3.1035304501323922e-06,
+      "loss": 1.6393,
+      "step": 78177
+    },
+    {
+      "epoch": 69.93847850055127,
+      "grad_norm": 1.0003052949905396,
+      "learning_rate": 3.003530450132392e-06,
+      "loss": 1.6259,
+      "step": 79310
+    },
+    {
+      "epoch": 70.93759647188534,
+      "grad_norm": 0.6473856568336487,
+      "learning_rate": 2.9036187113857018e-06,
+      "loss": 1.6302,
+      "step": 80443
+    },
+    {
+      "epoch": 71.9367144432194,
+      "grad_norm": 0.949741780757904,
+      "learning_rate": 2.8036187113857015e-06,
+      "loss": 1.6355,
+      "step": 81576
+    },
+    {
+      "epoch": 72.93583241455347,
+      "grad_norm": 1.2901132106781006,
+      "learning_rate": 2.7037069726390117e-06,
+      "loss": 1.6335,
+      "step": 82709
+    },
+    {
+      "epoch": 73.93495038588755,
+      "grad_norm": 1.8243396282196045,
+      "learning_rate": 2.6037952338923216e-06,
+      "loss": 1.6329,
+      "step": 83842
+    },
+    {
+      "epoch": 74.9340683572216,
+      "grad_norm": 0.6137579679489136,
+      "learning_rate": 2.5037952338923217e-06,
+      "loss": 1.6421,
+      "step": 84975
+    },
+    {
+      "epoch": 75.93318632855568,
+      "grad_norm": 1.270875096321106,
+      "learning_rate": 2.403883495145631e-06,
+      "loss": 1.628,
+      "step": 86108
+    },
+    {
+      "epoch": 76.93230429988975,
+      "grad_norm": 0.9131399393081665,
+      "learning_rate": 2.3038834951456313e-06,
+      "loss": 1.6235,
+      "step": 87241
+    },
+    {
+      "epoch": 77.93142227122381,
+      "grad_norm": 1.042668342590332,
+      "learning_rate": 2.203971756398941e-06,
+      "loss": 1.6323,
+      "step": 88374
+    },
+    {
+      "epoch": 78.93054024255788,
+      "grad_norm": 2.174466609954834,
+      "learning_rate": 2.103971756398941e-06,
+      "loss": 1.6263,
+      "step": 89507
+    },
+    {
+      "epoch": 79.92965821389195,
+      "grad_norm": 0.5852002501487732,
+      "learning_rate": 2.004060017652251e-06,
+      "loss": 1.6354,
+      "step": 90640
+    },
+    {
+      "epoch": 80.92877618522601,
+      "grad_norm": 0.41708850860595703,
+      "learning_rate": 1.9040600176522508e-06,
+      "loss": 1.6177,
+      "step": 91773
+    },
+    {
+      "epoch": 81.92789415656009,
+      "grad_norm": 1.2882851362228394,
+      "learning_rate": 1.8041482789055606e-06,
+      "loss": 1.6317,
+      "step": 92906
+    },
+    {
+      "epoch": 82.92701212789416,
+      "grad_norm": 1.3567571640014648,
+      "learning_rate": 1.7042365401588703e-06,
+      "loss": 1.6336,
+      "step": 94039
+    },
+    {
+      "epoch": 83.92613009922823,
+      "grad_norm": 1.7891902923583984,
+      "learning_rate": 1.6042365401588702e-06,
+      "loss": 1.6285,
+      "step": 95172
+    },
+    {
+      "epoch": 84.92524807056229,
+      "grad_norm": 1.0167814493179321,
+      "learning_rate": 1.5043248014121802e-06,
+      "loss": 1.6233,
+      "step": 96305
+    },
+    {
+      "epoch": 85.92436604189636,
+      "grad_norm": 0.6127368211746216,
+      "learning_rate": 1.4043248014121801e-06,
+      "loss": 1.6194,
+      "step": 97438
+    },
+    {
+      "epoch": 86.92348401323044,
+      "grad_norm": 0.4271755516529083,
+      "learning_rate": 1.30441306266549e-06,
+      "loss": 1.6292,
+      "step": 98571
+    },
+    {
+      "epoch": 87.9226019845645,
+      "grad_norm": 0.6342440843582153,
+      "learning_rate": 1.20441306266549e-06,
+      "loss": 1.6266,
+      "step": 99704
+    },
+    {
+      "epoch": 88.92171995589857,
+      "grad_norm": 0.4804386794567108,
+      "learning_rate": 1.1045013239187997e-06,
+      "loss": 1.624,
+      "step": 100837
+    },
+    {
+      "epoch": 89.92083792723264,
+      "grad_norm": 0.6205073595046997,
+      "learning_rate": 1.0045895851721096e-06,
+      "loss": 1.6225,
+      "step": 101970
+    },
+    {
+      "epoch": 90.9199558985667,
+      "grad_norm": 0.5994776487350464,
+      "learning_rate": 9.045895851721096e-07,
+      "loss": 1.6296,
+      "step": 103103
+    },
+    {
+      "epoch": 91.91907386990077,
+      "grad_norm": 1.454080581665039,
+      "learning_rate": 8.046778464254193e-07,
+      "loss": 1.6254,
+      "step": 104236
+    },
+    {
+      "epoch": 92.91819184123484,
+      "grad_norm": 0.8811701536178589,
+      "learning_rate": 7.046778464254193e-07,
+      "loss": 1.6295,
+      "step": 105369
+    },
+    {
+      "epoch": 93.9173098125689,
+      "grad_norm": 0.6332120895385742,
+      "learning_rate": 6.047661076787291e-07,
+      "loss": 1.6269,
+      "step": 106502
+    },
+    {
+      "epoch": 94.91642778390298,
+      "grad_norm": 1.2232398986816406,
+      "learning_rate": 5.047661076787291e-07,
+      "loss": 1.6254,
+      "step": 107635
+    },
+    {
+      "epoch": 95.91554575523705,
+      "grad_norm": 0.9554293751716614,
+      "learning_rate": 4.048543689320389e-07,
+      "loss": 1.6307,
+      "step": 108768
+    },
+    {
+      "epoch": 96.91466372657112,
+      "grad_norm": 0.42345738410949707,
+      "learning_rate": 3.0485436893203884e-07,
+      "loss": 1.6314,
+      "step": 109901
+    },
+    {
+      "epoch": 97.91378169790518,
+      "grad_norm": 0.4951033890247345,
+      "learning_rate": 2.0494263018534864e-07,
+      "loss": 1.6341,
+      "step": 111034
+    },
+    {
+      "epoch": 98.91289966923925,
+      "grad_norm": 0.40377479791641235,
+      "learning_rate": 1.0503089143865844e-07,
+      "loss": 1.6174,
+      "step": 112167
+    },
+    {
+      "epoch": 99.91201764057332,
+      "grad_norm": 0.5117936730384827,
+      "learning_rate": 5.03089143865843e-09,
+      "loss": 1.6313,
+      "step": 113300
+    },
+    {
+      "epoch": 99.91201764057332,
+      "step": 113300,
+      "total_flos": 5.364383961180322e+18,
+      "train_loss": 1.6900030501417007,
+      "train_runtime": 96835.149,
+      "train_samples_per_second": 140.49,
+      "train_steps_per_second": 1.17
+    }
+  ],
+  "logging_steps": 1133,
+  "max_steps": 113300,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 100,
+  "save_steps": 6800,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 5.364383961180322e+18,
+  "train_batch_size": 30,
+  "trial_name": null,
+  "trial_params": null
+}
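
`log_history` holds one entry every 1,133 steps (roughly once per epoch), so the loss and learning-rate curves are fully recoverable from this file alone; training loss falls from 2.13 to 1.63 over the ~100 epochs. A minimal extraction sketch (matplotlib assumed available):

```python
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Periodic log entries carry a "loss" key; the final summary entry does not.
logs = [e for e in state["log_history"] if "loss" in e]

plt.plot([e["epoch"] for e in logs], [e["loss"] for e in logs])
plt.xlabel("epoch")
plt.ylabel("training loss")
plt.savefig("loss_curve.png")
```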
vocab.txt ADDED
The diff for this file is too large to render. See raw diff