sharkMeow committed
Commit 467bd32 · verified · 1 Parent(s): 851278f

End of training

all_results.json ADDED
@@ -0,0 +1,8 @@
+{
+  "epoch": 99.8648280558393,
+  "total_flos": 3.848614248660486e+18,
+  "train_loss": 1.9516082713779377,
+  "train_runtime": 79711.1977,
+  "train_samples_per_second": 184.18,
+  "train_steps_per_second": 0.921
+}
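all_results.json holds the aggregate metrics the Trainer writes at the end of training. A minimal Python sketch for inspecting them, assuming a local copy of the file (the path is illustrative):

```python
import json

# Read the end-of-training summary written by the Hugging Face Trainer.
with open("all_results.json") as f:
    results = json.load(f)

print(f"epochs completed:   {results['epoch']:.2f}")
print(f"mean train loss:    {results['train_loss']:.4f}")
print(f"runtime (hours):    {results['train_runtime'] / 3600:.1f}")  # ~22.1 h
print(f"samples per second: {results['train_samples_per_second']}")
```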
preprocessor_config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "crop_size": {
+    "height": 224,
+    "width": 224
+  },
+  "do_center_crop": false,
+  "do_convert_rgb": true,
+  "do_normalize": true,
+  "do_rescale": true,
+  "do_resize": true,
+  "image_mean": [
+    0.48145466,
+    0.4578275,
+    0.40821073
+  ],
+  "image_processor_type": "ChineseCLIPImageProcessor",
+  "image_std": [
+    0.26862954,
+    0.26130258,
+    0.27577711
+  ],
+  "resample": 3,
+  "rescale_factor": 0.00392156862745098,
+  "size": {
+    "height": 224,
+    "width": 224
+  }
+}
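The preprocessor config declares a ChineseCLIPImageProcessor that resizes to 224×224, rescales by 1/255, and normalizes with the CLIP mean/std, with center cropping disabled. A minimal usage sketch, assuming the repository has been downloaded to a local directory (the path `./checkpoint` is a placeholder):

```python
from PIL import Image
from transformers import ChineseCLIPImageProcessor

# "./checkpoint" is a placeholder for a local copy of this repository.
processor = ChineseCLIPImageProcessor.from_pretrained("./checkpoint")

# Per the config: resize to 224x224, rescale by 1/255, normalize with the
# CLIP mean/std; center cropping is disabled (do_center_crop: false).
image = Image.new("RGB", (640, 480))
inputs = processor(images=image, return_tensors="pt")
print(inputs["pixel_values"].shape)  # torch.Size([1, 3, 224, 224])
```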
special_tokens_map.json ADDED
@@ -0,0 +1,7 @@
+{
+  "cls_token": "[CLS]",
+  "mask_token": "[MASK]",
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "unk_token": "[UNK]"
+}
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
tokenizer_config.json ADDED
@@ -0,0 +1,58 @@
+{
+  "added_tokens_decoder": {
+    "0": {
+      "content": "[PAD]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "100": {
+      "content": "[UNK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "101": {
+      "content": "[CLS]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "102": {
+      "content": "[SEP]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "103": {
+      "content": "[MASK]",
+      "lstrip": false,
+      "normalized": false,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "clean_up_tokenization_spaces": true,
+  "cls_token": "[CLS]",
+  "do_basic_tokenize": true,
+  "do_lower_case": true,
+  "extra_special_tokens": {},
+  "mask_token": "[MASK]",
+  "model_max_length": 1000000000000000019884624838656,
+  "never_split": null,
+  "pad_token": "[PAD]",
+  "sep_token": "[SEP]",
+  "strip_accents": null,
+  "tokenize_chinese_chars": true,
+  "tokenizer_class": "BertTokenizer",
+  "unk_token": "[UNK]"
+}
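tokenizer_config.json declares a BertTokenizer with the standard BERT special tokens (ids 0/100/101/102/103), lowercasing, and Chinese-character splitting. A minimal loading sketch, again using a placeholder local path:

```python
from transformers import BertTokenizer

# "./checkpoint" is a placeholder for a local copy of this repository
# (vocab.txt and tokenizer_config.json must be present).
tokenizer = BertTokenizer.from_pretrained("./checkpoint")

# Special-token ids from added_tokens_decoder:
# [PAD]=0, [UNK]=100, [CLS]=101, [SEP]=102, [MASK]=103
print(tokenizer.pad_token_id, tokenizer.cls_token_id, tokenizer.sep_token_id)

# Chinese characters are split individually; Latin text goes through WordPiece.
ids = tokenizer("一张猫的照片")["input_ids"]
print(tokenizer.convert_ids_to_tokens(ids))
```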
train_results.json ADDED
@@ -0,0 +1,8 @@
+{
+  "epoch": 99.8648280558393,
+  "total_flos": 3.848614248660486e+18,
+  "train_loss": 1.9516082713779377,
+  "train_runtime": 79711.1977,
+  "train_samples_per_second": 184.18,
+  "train_steps_per_second": 0.921
+}
trainer_state.json ADDED
@@ -0,0 +1,743 @@
+{
+  "best_global_step": null,
+  "best_metric": null,
+  "best_model_checkpoint": null,
+  "epoch": 99.8648280558393,
+  "eval_steps": 500,
+  "global_step": 73400,
+  "is_hyper_param_search": false,
+  "is_local_process_zero": true,
+  "is_world_process_zero": true,
+  "log_history": [
+    {
+      "epoch": 0.9996595165134491,
+      "grad_norm": 1.3896363973617554,
+      "learning_rate": 9.90040871934605e-06,
+      "loss": 2.3637,
+      "step": 734
+    },
+    {
+      "epoch": 1.9982975825672455,
+      "grad_norm": 1.085627794265747,
+      "learning_rate": 9.80040871934605e-06,
+      "loss": 2.3048,
+      "step": 1468
+    },
+    {
+      "epoch": 2.996935648621042,
+      "grad_norm": 1.7355504035949707,
+      "learning_rate": 9.700544959128066e-06,
+      "loss": 2.2626,
+      "step": 2202
+    },
+    {
+      "epoch": 3.9955737146748382,
+      "grad_norm": 2.631619691848755,
+      "learning_rate": 9.600544959128067e-06,
+      "loss": 2.2366,
+      "step": 2936
+    },
+    {
+      "epoch": 4.994211780728635,
+      "grad_norm": 3.0078556537628174,
+      "learning_rate": 9.500544959128066e-06,
+      "loss": 2.1902,
+      "step": 3670
+    },
+    {
+      "epoch": 5.992849846782431,
+      "grad_norm": 3.808694362640381,
+      "learning_rate": 9.400681198910083e-06,
+      "loss": 2.1761,
+      "step": 4404
+    },
+    {
+      "epoch": 6.991487912836227,
+      "grad_norm": 2.927497625350952,
+      "learning_rate": 9.300681198910082e-06,
+      "loss": 2.1423,
+      "step": 5138
+    },
+    {
+      "epoch": 7.990125978890024,
+      "grad_norm": 3.1034231185913086,
+      "learning_rate": 9.200681198910083e-06,
+      "loss": 2.1311,
+      "step": 5872
+    },
+    {
+      "epoch": 8.98876404494382,
+      "grad_norm": 2.8936243057250977,
+      "learning_rate": 9.1008174386921e-06,
+      "loss": 2.1006,
+      "step": 6606
+    },
+    {
+      "epoch": 9.987402110997616,
+      "grad_norm": 3.1552979946136475,
+      "learning_rate": 9.000817438692099e-06,
+      "loss": 2.1108,
+      "step": 7340
+    },
+    {
+      "epoch": 10.986040177051413,
+      "grad_norm": 4.212212562561035,
+      "learning_rate": 8.900817438692098e-06,
+      "loss": 2.0826,
+      "step": 8074
+    },
+    {
+      "epoch": 11.98467824310521,
+      "grad_norm": 3.0641448497772217,
+      "learning_rate": 8.800953678474115e-06,
+      "loss": 2.0827,
+      "step": 8808
+    },
+    {
+      "epoch": 12.983316309159006,
+      "grad_norm": 3.0309231281280518,
+      "learning_rate": 8.700953678474116e-06,
+      "loss": 2.0707,
+      "step": 9542
+    },
+    {
+      "epoch": 13.981954375212801,
+      "grad_norm": 2.910205364227295,
+      "learning_rate": 8.600953678474115e-06,
+      "loss": 2.0545,
+      "step": 10276
+    },
+    {
+      "epoch": 14.980592441266598,
+      "grad_norm": 2.4057796001434326,
+      "learning_rate": 8.501089918256132e-06,
+      "loss": 2.0518,
+      "step": 11010
+    },
+    {
+      "epoch": 15.979230507320395,
+      "grad_norm": 3.38080096244812,
+      "learning_rate": 8.401089918256131e-06,
+      "loss": 2.0431,
+      "step": 11744
+    },
+    {
+      "epoch": 16.97786857337419,
+      "grad_norm": 3.414673328399658,
+      "learning_rate": 8.301089918256132e-06,
+      "loss": 2.0278,
+      "step": 12478
+    },
+    {
+      "epoch": 17.97650663942799,
+      "grad_norm": 3.2804346084594727,
+      "learning_rate": 8.201089918256131e-06,
+      "loss": 2.0394,
+      "step": 13212
+    },
+    {
+      "epoch": 18.975144705481785,
+      "grad_norm": 3.313039541244507,
+      "learning_rate": 8.101089918256132e-06,
+      "loss": 2.0239,
+      "step": 13946
+    },
+    {
+      "epoch": 19.973782771535582,
+      "grad_norm": 3.2474405765533447,
+      "learning_rate": 8.001226158038149e-06,
+      "loss": 2.0155,
+      "step": 14680
+    },
+    {
+      "epoch": 20.972420837589375,
+      "grad_norm": 3.154122829437256,
+      "learning_rate": 7.901226158038148e-06,
+      "loss": 2.0183,
+      "step": 15414
+    },
+    {
+      "epoch": 21.971058903643172,
+      "grad_norm": 4.2719407081604,
+      "learning_rate": 7.801226158038147e-06,
+      "loss": 2.0059,
+      "step": 16148
+    },
+    {
+      "epoch": 22.96969696969697,
+      "grad_norm": 3.08585262298584,
+      "learning_rate": 7.701226158038148e-06,
+      "loss": 2.007,
+      "step": 16882
+    },
+    {
+      "epoch": 23.968335035750766,
+      "grad_norm": 3.8600127696990967,
+      "learning_rate": 7.601362397820165e-06,
+      "loss": 1.9813,
+      "step": 17616
+    },
+    {
+      "epoch": 24.966973101804562,
+      "grad_norm": 4.064390182495117,
+      "learning_rate": 7.501362397820165e-06,
+      "loss": 1.9909,
+      "step": 18350
+    },
+    {
+      "epoch": 25.96561116785836,
+      "grad_norm": 5.1680169105529785,
+      "learning_rate": 7.401362397820164e-06,
+      "loss": 1.9781,
+      "step": 19084
+    },
+    {
+      "epoch": 26.964249233912156,
+      "grad_norm": 3.716388702392578,
+      "learning_rate": 7.30149863760218e-06,
+      "loss": 1.9841,
+      "step": 19818
+    },
+    {
+      "epoch": 27.962887299965953,
+      "grad_norm": 1.9636414051055908,
+      "learning_rate": 7.20149863760218e-06,
+      "loss": 1.9727,
+      "step": 20552
+    },
+    {
+      "epoch": 28.96152536601975,
+      "grad_norm": 2.968393087387085,
+      "learning_rate": 7.10149863760218e-06,
+      "loss": 1.9729,
+      "step": 21286
+    },
+    {
+      "epoch": 29.960163432073543,
+      "grad_norm": 1.9185447692871094,
+      "learning_rate": 7.001498637602181e-06,
+      "loss": 1.9586,
+      "step": 22020
+    },
+    {
+      "epoch": 30.95880149812734,
+      "grad_norm": 3.3125815391540527,
+      "learning_rate": 6.901634877384197e-06,
+      "loss": 1.9661,
+      "step": 22754
+    },
+    {
+      "epoch": 31.957439564181136,
+      "grad_norm": 3.3222153186798096,
+      "learning_rate": 6.801634877384197e-06,
+      "loss": 1.9532,
+      "step": 23488
+    },
+    {
+      "epoch": 32.95607763023494,
+      "grad_norm": 2.3255455493927,
+      "learning_rate": 6.701634877384197e-06,
+      "loss": 1.9445,
+      "step": 24222
+    },
+    {
+      "epoch": 33.95471569628873,
+      "grad_norm": 3.436879873275757,
+      "learning_rate": 6.6017711171662135e-06,
+      "loss": 1.9564,
+      "step": 24956
+    },
+    {
+      "epoch": 34.95335376234252,
+      "grad_norm": 2.789512872695923,
+      "learning_rate": 6.501771117166214e-06,
+      "loss": 1.9458,
+      "step": 25690
+    },
+    {
+      "epoch": 35.951991828396324,
+      "grad_norm": 2.496615409851074,
+      "learning_rate": 6.401771117166214e-06,
+      "loss": 1.9314,
+      "step": 26424
+    },
+    {
+      "epoch": 36.95062989445012,
+      "grad_norm": 3.680979013442993,
+      "learning_rate": 6.301907356948229e-06,
+      "loss": 1.9481,
+      "step": 27158
+    },
+    {
+      "epoch": 37.94926796050392,
+      "grad_norm": 3.8750393390655518,
+      "learning_rate": 6.201907356948229e-06,
+      "loss": 1.9346,
+      "step": 27892
+    },
+    {
+      "epoch": 38.94790602655771,
+      "grad_norm": 3.358818531036377,
+      "learning_rate": 6.101907356948229e-06,
+      "loss": 1.9426,
+      "step": 28626
+    },
+    {
+      "epoch": 39.94654409261151,
+      "grad_norm": 3.4985063076019287,
+      "learning_rate": 6.001907356948229e-06,
+      "loss": 1.9319,
+      "step": 29360
+    },
+    {
+      "epoch": 40.945182158665304,
+      "grad_norm": 3.5801708698272705,
+      "learning_rate": 5.9020435967302455e-06,
+      "loss": 1.9316,
+      "step": 30094
+    },
+    {
+      "epoch": 41.943820224719104,
+      "grad_norm": 2.131335973739624,
+      "learning_rate": 5.802043596730246e-06,
+      "loss": 1.9267,
+      "step": 30828
+    },
+    {
+      "epoch": 42.9424582907729,
+      "grad_norm": 3.048663854598999,
+      "learning_rate": 5.702043596730246e-06,
+      "loss": 1.9313,
+      "step": 31562
+    },
+    {
+      "epoch": 43.94109635682669,
+      "grad_norm": 4.18536376953125,
+      "learning_rate": 5.602043596730246e-06,
+      "loss": 1.9307,
+      "step": 32296
+    },
+    {
+      "epoch": 44.93973442288049,
+      "grad_norm": 3.1449191570281982,
+      "learning_rate": 5.502043596730245e-06,
+      "loss": 1.9121,
+      "step": 33030
+    },
+    {
+      "epoch": 45.938372488934284,
+      "grad_norm": 2.445446491241455,
+      "learning_rate": 5.4021798365122625e-06,
+      "loss": 1.9147,
+      "step": 33764
+    },
+    {
+      "epoch": 46.937010554988085,
+      "grad_norm": 3.044128656387329,
+      "learning_rate": 5.302316076294278e-06,
+      "loss": 1.9179,
+      "step": 34498
+    },
+    {
+      "epoch": 47.93564862104188,
+      "grad_norm": 2.411952018737793,
+      "learning_rate": 5.202316076294278e-06,
+      "loss": 1.9175,
+      "step": 35232
+    },
+    {
+      "epoch": 48.93428668709568,
+      "grad_norm": 4.5417799949646,
+      "learning_rate": 5.1023160762942784e-06,
+      "loss": 1.9139,
+      "step": 35966
+    },
+    {
+      "epoch": 49.93292475314947,
+      "grad_norm": 3.36789608001709,
+      "learning_rate": 5.002452316076295e-06,
+      "loss": 1.8999,
+      "step": 36700
+    },
+    {
+      "epoch": 50.93156281920327,
+      "grad_norm": 1.4864888191223145,
+      "learning_rate": 4.902452316076295e-06,
+      "loss": 1.9151,
+      "step": 37434
+    },
+    {
+      "epoch": 51.930200885257065,
+      "grad_norm": 2.543835163116455,
+      "learning_rate": 4.802452316076295e-06,
+      "loss": 1.9049,
+      "step": 38168
+    },
+    {
+      "epoch": 52.92883895131086,
+      "grad_norm": 2.121729850769043,
+      "learning_rate": 4.7024523160762945e-06,
+      "loss": 1.9099,
+      "step": 38902
+    },
+    {
+      "epoch": 53.92747701736466,
+      "grad_norm": 3.2268223762512207,
+      "learning_rate": 4.602452316076295e-06,
+      "loss": 1.9071,
+      "step": 39636
+    },
+    {
+      "epoch": 54.92611508341845,
+      "grad_norm": 3.928720235824585,
+      "learning_rate": 4.502452316076295e-06,
+      "loss": 1.8883,
+      "step": 40370
+    },
+    {
+      "epoch": 55.92475314947225,
+      "grad_norm": 2.6059372425079346,
+      "learning_rate": 4.402588555858311e-06,
+      "loss": 1.9033,
+      "step": 41104
+    },
+    {
+      "epoch": 56.923391215526046,
+      "grad_norm": 2.899505615234375,
+      "learning_rate": 4.3025885558583105e-06,
+      "loss": 1.8852,
+      "step": 41838
+    },
+    {
+      "epoch": 57.922029281579846,
+      "grad_norm": 2.1250553131103516,
+      "learning_rate": 4.202588555858311e-06,
+      "loss": 1.8919,
+      "step": 42572
+    },
+    {
+      "epoch": 58.92066734763364,
+      "grad_norm": 3.6068265438079834,
+      "learning_rate": 4.102588555858311e-06,
+      "loss": 1.8801,
+      "step": 43306
+    },
+    {
+      "epoch": 59.91930541368744,
+      "grad_norm": 3.1520488262176514,
+      "learning_rate": 4.002588555858311e-06,
+      "loss": 1.8997,
+      "step": 44040
+    },
+    {
+      "epoch": 60.91794347974123,
+      "grad_norm": 2.7997710704803467,
+      "learning_rate": 3.902588555858311e-06,
+      "loss": 1.8942,
+      "step": 44774
+    },
+    {
+      "epoch": 61.916581545795026,
+      "grad_norm": 2.2547781467437744,
+      "learning_rate": 3.8027247956403276e-06,
+      "loss": 1.8996,
+      "step": 45508
+    },
+    {
+      "epoch": 62.915219611848826,
+      "grad_norm": 2.583367347717285,
+      "learning_rate": 3.7027247956403272e-06,
+      "loss": 1.8811,
+      "step": 46242
+    },
+    {
+      "epoch": 63.91385767790262,
+      "grad_norm": 3.9086215496063232,
+      "learning_rate": 3.6027247956403274e-06,
+      "loss": 1.8878,
+      "step": 46976
+    },
+    {
+      "epoch": 64.91249574395641,
+      "grad_norm": 1.8989681005477905,
+      "learning_rate": 3.50299727520436e-06,
+      "loss": 1.8987,
+      "step": 47710
+    },
+    {
+      "epoch": 65.91113381001021,
+      "grad_norm": 2.5425479412078857,
+      "learning_rate": 3.40299727520436e-06,
+      "loss": 1.8928,
+      "step": 48444
+    },
+    {
+      "epoch": 66.90977187606401,
+      "grad_norm": 2.8161380290985107,
+      "learning_rate": 3.3029972752043598e-06,
+      "loss": 1.8833,
+      "step": 49178
+    },
+    {
+      "epoch": 67.90840994211781,
+      "grad_norm": 3.328853130340576,
+      "learning_rate": 3.20299727520436e-06,
+      "loss": 1.8802,
+      "step": 49912
+    },
+    {
+      "epoch": 68.9070480081716,
+      "grad_norm": 1.836472749710083,
+      "learning_rate": 3.1029972752043596e-06,
+      "loss": 1.8821,
+      "step": 50646
+    },
+    {
+      "epoch": 69.9056860742254,
+      "grad_norm": 3.308563470840454,
+      "learning_rate": 3.00299727520436e-06,
+      "loss": 1.8824,
+      "step": 51380
+    },
+    {
+      "epoch": 70.9043241402792,
+      "grad_norm": 2.7526936531066895,
+      "learning_rate": 2.9031335149863767e-06,
+      "loss": 1.8828,
+      "step": 52114
+    },
+    {
+      "epoch": 71.90296220633299,
+      "grad_norm": 2.601332664489746,
+      "learning_rate": 2.8031335149863763e-06,
+      "loss": 1.8872,
+      "step": 52848
+    },
+    {
+      "epoch": 72.90160027238679,
+      "grad_norm": 3.038666248321533,
+      "learning_rate": 2.7031335149863765e-06,
+      "loss": 1.8861,
+      "step": 53582
+    },
+    {
+      "epoch": 73.90023833844059,
+      "grad_norm": 2.3815577030181885,
+      "learning_rate": 2.6032697547683926e-06,
+      "loss": 1.8768,
+      "step": 54316
+    },
+    {
+      "epoch": 74.89887640449439,
+      "grad_norm": 3.253783941268921,
+      "learning_rate": 2.5032697547683927e-06,
+      "loss": 1.8767,
+      "step": 55050
+    },
+    {
+      "epoch": 75.89751447054817,
+      "grad_norm": 1.8898478746414185,
+      "learning_rate": 2.403405994550409e-06,
+      "loss": 1.8778,
+      "step": 55784
+    },
+    {
+      "epoch": 76.89615253660197,
+      "grad_norm": 2.8832762241363525,
+      "learning_rate": 2.303405994550409e-06,
+      "loss": 1.8816,
+      "step": 56518
+    },
+    {
+      "epoch": 77.89479060265577,
+      "grad_norm": 2.767455577850342,
+      "learning_rate": 2.2034059945504087e-06,
+      "loss": 1.8817,
+      "step": 57252
+    },
+    {
+      "epoch": 78.89342866870956,
+      "grad_norm": 1.9658331871032715,
+      "learning_rate": 2.1035422343324252e-06,
+      "loss": 1.8857,
+      "step": 57986
+    },
+    {
+      "epoch": 79.89206673476336,
+      "grad_norm": 2.7493135929107666,
+      "learning_rate": 2.003542234332425e-06,
+      "loss": 1.8763,
+      "step": 58720
+    },
+    {
+      "epoch": 80.89070480081716,
+      "grad_norm": 2.519388198852539,
+      "learning_rate": 1.9035422343324252e-06,
+      "loss": 1.8782,
+      "step": 59454
+    },
+    {
+      "epoch": 81.88934286687096,
+      "grad_norm": 2.258284091949463,
+      "learning_rate": 1.8036784741144416e-06,
+      "loss": 1.8709,
+      "step": 60188
+    },
+    {
+      "epoch": 82.88798093292475,
+      "grad_norm": 2.6191112995147705,
+      "learning_rate": 1.7038147138964577e-06,
+      "loss": 1.8486,
+      "step": 60922
+    },
+    {
+      "epoch": 83.88661899897855,
+      "grad_norm": 3.241089105606079,
+      "learning_rate": 1.603814713896458e-06,
+      "loss": 1.8726,
+      "step": 61656
+    },
+    {
+      "epoch": 84.88525706503235,
+      "grad_norm": 1.7192323207855225,
+      "learning_rate": 1.503814713896458e-06,
+      "loss": 1.8793,
+      "step": 62390
+    },
+    {
+      "epoch": 85.88389513108615,
+      "grad_norm": 2.782193899154663,
+      "learning_rate": 1.4038147138964579e-06,
+      "loss": 1.8714,
+      "step": 63124
+    },
+    {
+      "epoch": 86.88253319713994,
+      "grad_norm": 2.659689426422119,
+      "learning_rate": 1.303814713896458e-06,
+      "loss": 1.8908,
+      "step": 63858
+    },
+    {
+      "epoch": 87.88117126319374,
+      "grad_norm": 1.9582741260528564,
+      "learning_rate": 1.2038147138964579e-06,
+      "loss": 1.8595,
+      "step": 64592
+    },
+    {
+      "epoch": 88.87980932924754,
+      "grad_norm": 1.9837833642959595,
+      "learning_rate": 1.1039509536784742e-06,
+      "loss": 1.875,
+      "step": 65326
+    },
+    {
+      "epoch": 89.87844739530132,
+      "grad_norm": 2.076040506362915,
+      "learning_rate": 1.0039509536784741e-06,
+      "loss": 1.8664,
+      "step": 66060
+    },
+    {
+      "epoch": 90.87708546135512,
+      "grad_norm": 1.5430365800857544,
+      "learning_rate": 9.039509536784742e-07,
+      "loss": 1.8875,
+      "step": 66794
+    },
+    {
+      "epoch": 91.87572352740892,
+      "grad_norm": 1.8234200477600098,
+      "learning_rate": 8.040871934604905e-07,
+      "loss": 1.868,
+      "step": 67528
+    },
+    {
+      "epoch": 92.87436159346272,
+      "grad_norm": 2.1992905139923096,
+      "learning_rate": 7.040871934604905e-07,
+      "loss": 1.8844,
+      "step": 68262
+    },
+    {
+      "epoch": 93.87299965951651,
+      "grad_norm": 2.1095755100250244,
+      "learning_rate": 6.042234332425069e-07,
+      "loss": 1.8808,
+      "step": 68996
+    },
+    {
+      "epoch": 94.87163772557031,
+      "grad_norm": 2.1402716636657715,
+      "learning_rate": 5.042234332425069e-07,
+      "loss": 1.8661,
+      "step": 69730
+    },
+    {
+      "epoch": 95.87027579162411,
+      "grad_norm": 2.9710769653320312,
+      "learning_rate": 4.0422343324250687e-07,
+      "loss": 1.8651,
+      "step": 70464
+    },
+    {
+      "epoch": 96.8689138576779,
+      "grad_norm": 1.769300103187561,
+      "learning_rate": 3.043596730245232e-07,
+      "loss": 1.8726,
+      "step": 71198
+    },
+    {
+      "epoch": 97.8675519237317,
+      "grad_norm": 2.095330238342285,
+      "learning_rate": 2.0435967302452318e-07,
+      "loss": 1.8691,
+      "step": 71932
+    },
+    {
+      "epoch": 98.8661899897855,
+      "grad_norm": 2.741286039352417,
+      "learning_rate": 1.0435967302452316e-07,
+      "loss": 1.8809,
+      "step": 72666
+    },
+    {
+      "epoch": 99.8648280558393,
+      "grad_norm": 2.270244836807251,
+      "learning_rate": 4.49591280653951e-09,
+      "loss": 1.8619,
+      "step": 73400
+    },
+    {
+      "epoch": 99.8648280558393,
+      "step": 73400,
+      "total_flos": 3.848614248660486e+18,
+      "train_loss": 1.9516082713779377,
+      "train_runtime": 79711.1977,
+      "train_samples_per_second": 184.18,
+      "train_steps_per_second": 0.921
+    }
+  ],
+  "logging_steps": 734,
+  "max_steps": 73400,
+  "num_input_tokens_seen": 0,
+  "num_train_epochs": 100,
+  "save_steps": 6800,
+  "stateful_callbacks": {
+    "TrainerControl": {
+      "args": {
+        "should_epoch_stop": false,
+        "should_evaluate": false,
+        "should_log": false,
+        "should_save": true,
+        "should_training_stop": true
+      },
+      "attributes": {}
+    }
+  },
+  "total_flos": 3.848614248660486e+18,
+  "train_batch_size": 50,
+  "trial_name": null,
+  "trial_params": null
+}
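trainer_state.json keeps the full log_history (one record every 734 steps, i.e. roughly once per epoch), so the loss curve can be recovered directly from the JSON. A minimal sketch, assuming a local copy of the file:

```python
import json

# Load the saved Trainer state and keep only the per-epoch logging records
# (the final summary record has "train_loss" instead of "loss", so it is skipped).
with open("trainer_state.json") as f:
    state = json.load(f)

loss_log = [e for e in state["log_history"] if "loss" in e]
for entry in loss_log[::10]:  # print every 10th logged point
    print(f"epoch {entry['epoch']:6.2f}  step {entry['step']:6d}  loss {entry['loss']:.4f}")
# Training loss drops from about 2.36 in the first epoch to about 1.86 by epoch 100.
```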
vocab.txt ADDED
The diff for this file is too large to render. See raw diff