sengi committed
Commit 2196a44 · verified · 1 Parent(s): 478d56c

Model save
README.md CHANGED
@@ -49,7 +49,7 @@ The following hyperparameters were used during training:
  - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
  - lr_scheduler_type: cosine
  - lr_scheduler_warmup_ratio: 0.1
- - training_steps: 1000
+ - training_steps: 1
  - mixed_precision_training: Native AMP
 
  ### Training results
@@ -61,5 +61,5 @@ The following hyperparameters were used during training:
  - PEFT 0.10.0
  - Transformers 4.37.2
  - Pytorch 2.2.0
- - Datasets 2.14.6
+ - Datasets 2.18.0
  - Tokenizers 0.15.1
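For reference, the hyperparameters listed in this README hunk correspond roughly to the `transformers` `TrainingArguments` sketch below. This is an illustrative reconstruction, not the repository's training script: `output_dir` and the `fp16` flag are assumptions, while the numeric values come from this commit's README and trainer_state.json.

```python
# Illustrative sketch only -- not the training script shipped with this repo.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="lora_4",            # assumption: named after the adapter folder in this commit
    per_device_train_batch_size=2,  # "train_batch_size": 2 in trainer_state.json
    max_steps=1,                    # training_steps reduced from 1000 to 1 in this commit
    learning_rate=2e-4,             # "learning_rate": 0.0002 logged at step 1
    lr_scheduler_type="cosine",
    warmup_ratio=0.1,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    fp16=True,                      # assumption: "Native AMP" mixed-precision training
    logging_steps=5,                # from trainer_state.json
    save_steps=100,                 # from trainer_state.json
)
```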
all_results.json CHANGED
@@ -1,8 +1,8 @@
  {
- "epoch": 0.06,
- "train_loss": -130335.20808300782,
- "train_runtime": 5587.0346,
+ "epoch": 0.0,
+ "train_loss": 0.0,
+ "train_runtime": 7.0368,
  "train_samples": 207865,
- "train_samples_per_second": 1.432,
- "train_steps_per_second": 0.179
+ "train_samples_per_second": 1.137,
+ "train_steps_per_second": 0.142
  }
lora_4/adapter_config.json CHANGED
@@ -23,12 +23,12 @@
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
+ "o_proj",
+ "q_proj",
  "down_proj",
+ "up_proj",
  "k_proj",
  "gate_proj",
- "q_proj",
- "o_proj",
- "up_proj",
  "v_proj"
  ],
  "task_type": null,
lora_4/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:69fd14c7e249a3d390d5f29ab54efb1b8ef865e9f2b0e88fce88a3d087c69f71
+ oid sha256:264fb80c3a7830c9942425211af6bdc69e6bc596276a815cc422cd6c999f2d33
  size 167832240
runs/Apr15_11-55-54_g3012/events.out.tfevents.1713208519.g3012.56779.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52d5d0eda8c42529d2953c318147e8d496aa540777fa16f5ccdd105d83c44f1c
+ size 4449
runs/Apr15_12-28-18_g3012/events.out.tfevents.1713210185.g3012.74144.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c65f75da552aa6e8a53fcc94349037117b01d3be2617d041fc1dd9a65f2c3a18
+ size 4449
runs/Apr15_13-38-11_g3012/events.out.tfevents.1713213653.g3012.94798.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:462234091e0f35524b1b5b033539f7e62b73ba9a8465c822056edc177aa1b828
+ size 4951
runs/Apr15_13-38-11_g3012/events.out.tfevents.1713213680.g3012.94798.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a1d85013a0414c1ca78d60a41d79a215912cdbc446a951bacda42e1102f09207
+ size 4951
runs/Apr15_13-38-11_g3012/events.out.tfevents.1713213688.g3012.94798.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ab36f3293bfbb276089d3d1cc6baa41c5a85b67562a0494187cd4f702fd596ba
+ size 4951
runs/Apr15_13-38-11_g3012/events.out.tfevents.1713213696.g3012.94798.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:82a33bd23143823fc79d9f75676fca5bbe3096813c347386604becdcfe89613b
+ size 4951
runs/Apr15_13-38-11_g3012/events.out.tfevents.1713213704.g3012.94798.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a36c2f4825a4c93816f3e10086ee7fe1a88b25deead9e2246668781e95e4e40c
+ size 4951
train_results.json CHANGED
@@ -1,8 +1,8 @@
  {
- "epoch": 0.06,
- "train_loss": -130335.20808300782,
- "train_runtime": 5587.0346,
+ "epoch": 0.0,
+ "train_loss": 0.0,
+ "train_runtime": 7.0368,
  "train_samples": 207865,
- "train_samples_per_second": 1.432,
- "train_steps_per_second": 0.179
+ "train_samples_per_second": 1.137,
+ "train_steps_per_second": 0.142
  }
trainer_state.json CHANGED
@@ -1,1235 +1,35 @@
1
  {
2
  "best_metric": null,
3
  "best_model_checkpoint": null,
4
- "epoch": 0.05737563830397613,
5
  "eval_steps": 500,
6
- "global_step": 1000,
7
  "is_hyper_param_search": false,
8
  "is_local_process_zero": true,
9
  "is_world_process_zero": true,
10
  "log_history": [
11
  {
12
  "epoch": 0.0,
13
- "learning_rate": 2.0000000000000003e-06,
14
- "loss": -7547.9619,
15
  "step": 1
16
  },
17
  {
18
  "epoch": 0.0,
19
- "learning_rate": 1e-05,
20
- "loss": -9106.0537,
21
- "step": 5
22
- },
23
- {
24
- "epoch": 0.0,
25
- "learning_rate": 2e-05,
26
- "loss": -10521.2672,
27
- "step": 10
28
- },
29
- {
30
- "epoch": 0.0,
31
- "learning_rate": 3e-05,
32
- "loss": -13425.4438,
33
- "step": 15
34
- },
35
- {
36
- "epoch": 0.0,
37
- "learning_rate": 4e-05,
38
- "loss": -18578.7203,
39
- "step": 20
40
- },
41
- {
42
- "epoch": 0.0,
43
- "learning_rate": 5e-05,
44
- "loss": -29817.3,
45
- "step": 25
46
- },
47
- {
48
- "epoch": 0.0,
49
- "learning_rate": 6e-05,
50
- "loss": -61245.15,
51
- "step": 30
52
- },
53
- {
54
- "epoch": 0.0,
55
- "learning_rate": 7e-05,
56
- "loss": -120348.9625,
57
- "step": 35
58
- },
59
- {
60
- "epoch": 0.0,
61
- "learning_rate": 8e-05,
62
- "loss": -156901.3375,
63
- "step": 40
64
- },
65
- {
66
- "epoch": 0.0,
67
- "learning_rate": 9e-05,
68
- "loss": -177861.1,
69
- "step": 45
70
- },
71
- {
72
- "epoch": 0.0,
73
- "learning_rate": 0.0001,
74
- "loss": -189531.375,
75
- "step": 50
76
- },
77
- {
78
- "epoch": 0.0,
79
- "learning_rate": 0.00011000000000000002,
80
- "loss": -170585.1375,
81
- "step": 55
82
- },
83
- {
84
- "epoch": 0.0,
85
- "learning_rate": 0.00012,
86
- "loss": -174312.9,
87
- "step": 60
88
- },
89
- {
90
- "epoch": 0.0,
91
- "learning_rate": 0.00013000000000000002,
92
- "loss": -179821.775,
93
- "step": 65
94
- },
95
- {
96
- "epoch": 0.0,
97
- "learning_rate": 0.00014,
98
- "loss": -182256.625,
99
- "step": 70
100
- },
101
- {
102
- "epoch": 0.0,
103
- "learning_rate": 0.00015000000000000001,
104
- "loss": -191381.75,
105
- "step": 75
106
- },
107
- {
108
- "epoch": 0.0,
109
- "learning_rate": 0.00016,
110
- "loss": -197267.975,
111
- "step": 80
112
- },
113
- {
114
- "epoch": 0.0,
115
- "learning_rate": 0.00017,
116
- "loss": -203530.25,
117
- "step": 85
118
- },
119
- {
120
- "epoch": 0.01,
121
- "learning_rate": 0.00018,
122
- "loss": -207515.1875,
123
- "step": 90
124
- },
125
- {
126
- "epoch": 0.01,
127
- "learning_rate": 0.00019,
128
- "loss": -209935.85,
129
- "step": 95
130
- },
131
- {
132
- "epoch": 0.01,
133
- "learning_rate": 0.0002,
134
- "loss": -213990.225,
135
- "step": 100
136
- },
137
- {
138
- "epoch": 0.01,
139
- "learning_rate": 0.00019998476951563915,
140
- "loss": -216354.45,
141
- "step": 105
142
- },
143
- {
144
- "epoch": 0.01,
145
- "learning_rate": 0.0001999390827019096,
146
- "loss": -186972.275,
147
- "step": 110
148
- },
149
- {
150
- "epoch": 0.01,
151
- "learning_rate": 0.0001998629534754574,
152
- "loss": -121758.175,
153
- "step": 115
154
- },
155
- {
156
- "epoch": 0.01,
157
- "learning_rate": 0.00019975640502598244,
158
- "loss": -121319.3,
159
- "step": 120
160
- },
161
- {
162
- "epoch": 0.01,
163
- "learning_rate": 0.00019961946980917456,
164
- "loss": -120151.6,
165
- "step": 125
166
- },
167
- {
168
- "epoch": 0.01,
169
- "learning_rate": 0.00019945218953682734,
170
- "loss": -122962.075,
171
- "step": 130
172
- },
173
- {
174
- "epoch": 0.01,
175
- "learning_rate": 0.00019925461516413223,
176
- "loss": -121680.5875,
177
- "step": 135
178
- },
179
- {
180
- "epoch": 0.01,
181
- "learning_rate": 0.00019902680687415705,
182
- "loss": -124972.4125,
183
- "step": 140
184
- },
185
- {
186
- "epoch": 0.01,
187
- "learning_rate": 0.00019876883405951377,
188
- "loss": -123293.25,
189
- "step": 145
190
- },
191
- {
192
- "epoch": 0.01,
193
- "learning_rate": 0.00019848077530122083,
194
- "loss": -123971.2875,
195
- "step": 150
196
- },
197
- {
198
- "epoch": 0.01,
199
- "learning_rate": 0.00019816271834476642,
200
- "loss": -121120.7125,
201
- "step": 155
202
- },
203
- {
204
- "epoch": 0.01,
205
- "learning_rate": 0.00019781476007338058,
206
- "loss": -125770.65,
207
- "step": 160
208
- },
209
- {
210
- "epoch": 0.01,
211
- "learning_rate": 0.00019743700647852354,
212
- "loss": -122050.65,
213
- "step": 165
214
- },
215
- {
216
- "epoch": 0.01,
217
- "learning_rate": 0.00019702957262759965,
218
- "loss": -124877.2,
219
- "step": 170
220
- },
221
- {
222
- "epoch": 0.01,
223
- "learning_rate": 0.00019659258262890683,
224
- "loss": -124584.5,
225
- "step": 175
226
- },
227
- {
228
- "epoch": 0.01,
229
- "learning_rate": 0.0001961261695938319,
230
- "loss": -123650.8,
231
- "step": 180
232
- },
233
- {
234
- "epoch": 0.01,
235
- "learning_rate": 0.00019563047559630357,
236
- "loss": -122581.825,
237
- "step": 185
238
- },
239
- {
240
- "epoch": 0.01,
241
- "learning_rate": 0.00019510565162951537,
242
- "loss": -126474.3125,
243
- "step": 190
244
- },
245
- {
246
- "epoch": 0.01,
247
- "learning_rate": 0.0001945518575599317,
248
- "loss": -125734.775,
249
- "step": 195
250
- },
251
- {
252
- "epoch": 0.01,
253
- "learning_rate": 0.00019396926207859084,
254
- "loss": -124667.3875,
255
- "step": 200
256
- },
257
- {
258
- "epoch": 0.01,
259
- "learning_rate": 0.00019335804264972018,
260
- "loss": -125573.05,
261
- "step": 205
262
- },
263
- {
264
- "epoch": 0.01,
265
- "learning_rate": 0.00019271838545667876,
266
- "loss": -123766.075,
267
- "step": 210
268
- },
269
- {
270
- "epoch": 0.01,
271
- "learning_rate": 0.00019205048534524406,
272
- "loss": -127037.1,
273
- "step": 215
274
- },
275
- {
276
- "epoch": 0.01,
277
- "learning_rate": 0.0001913545457642601,
278
- "loss": -126327.7375,
279
- "step": 220
280
- },
281
- {
282
- "epoch": 0.01,
283
- "learning_rate": 0.000190630778703665,
284
- "loss": -124661.9375,
285
- "step": 225
286
- },
287
- {
288
- "epoch": 0.01,
289
- "learning_rate": 0.0001898794046299167,
290
- "loss": -126650.575,
291
- "step": 230
292
- },
293
- {
294
- "epoch": 0.01,
295
- "learning_rate": 0.0001891006524188368,
296
- "loss": -125326.3375,
297
- "step": 235
298
- },
299
- {
300
- "epoch": 0.01,
301
- "learning_rate": 0.00018829475928589271,
302
- "loss": -126330.2125,
303
- "step": 240
304
- },
305
- {
306
- "epoch": 0.01,
307
- "learning_rate": 0.00018746197071393958,
308
- "loss": -129089.6,
309
- "step": 245
310
- },
311
- {
312
- "epoch": 0.01,
313
- "learning_rate": 0.00018660254037844388,
314
- "loss": -127315.075,
315
- "step": 250
316
- },
317
- {
318
- "epoch": 0.01,
319
- "learning_rate": 0.00018571673007021123,
320
- "loss": -128970.2875,
321
- "step": 255
322
- },
323
- {
324
- "epoch": 0.01,
325
- "learning_rate": 0.0001848048096156426,
326
- "loss": -127177.8625,
327
- "step": 260
328
- },
329
- {
330
- "epoch": 0.02,
331
- "learning_rate": 0.00018386705679454242,
332
- "loss": -126255.525,
333
- "step": 265
334
- },
335
- {
336
- "epoch": 0.02,
337
- "learning_rate": 0.00018290375725550417,
338
- "loss": -126107.25,
339
- "step": 270
340
- },
341
- {
342
- "epoch": 0.02,
343
- "learning_rate": 0.0001819152044288992,
344
- "loss": -128510.775,
345
- "step": 275
346
- },
347
- {
348
- "epoch": 0.02,
349
- "learning_rate": 0.00018090169943749476,
350
- "loss": -125616.225,
351
- "step": 280
352
- },
353
- {
354
- "epoch": 0.02,
355
- "learning_rate": 0.00017986355100472928,
356
- "loss": -129116.6125,
357
- "step": 285
358
- },
359
- {
360
- "epoch": 0.02,
361
- "learning_rate": 0.00017880107536067218,
362
- "loss": -129702.9375,
363
- "step": 290
364
- },
365
- {
366
- "epoch": 0.02,
367
- "learning_rate": 0.0001777145961456971,
368
- "loss": -129070.425,
369
- "step": 295
370
- },
371
- {
372
- "epoch": 0.02,
373
- "learning_rate": 0.0001766044443118978,
374
- "loss": -128198.4875,
375
- "step": 300
376
- },
377
- {
378
- "epoch": 0.02,
379
- "learning_rate": 0.00017547095802227723,
380
- "loss": -127598.8,
381
- "step": 305
382
- },
383
- {
384
- "epoch": 0.02,
385
- "learning_rate": 0.00017431448254773944,
386
- "loss": -128338.525,
387
- "step": 310
388
- },
389
- {
390
- "epoch": 0.02,
391
- "learning_rate": 0.00017313537016191706,
392
- "loss": -127601.575,
393
- "step": 315
394
- },
395
- {
396
- "epoch": 0.02,
397
- "learning_rate": 0.0001719339800338651,
398
- "loss": -126842.4125,
399
- "step": 320
400
- },
401
- {
402
- "epoch": 0.02,
403
- "learning_rate": 0.00017071067811865476,
404
- "loss": -131300.9625,
405
- "step": 325
406
- },
407
- {
408
- "epoch": 0.02,
409
- "learning_rate": 0.00016946583704589973,
410
- "loss": -126271.675,
411
- "step": 330
412
- },
413
- {
414
- "epoch": 0.02,
415
- "learning_rate": 0.00016819983600624986,
416
- "loss": -130244.475,
417
- "step": 335
418
- },
419
- {
420
- "epoch": 0.02,
421
- "learning_rate": 0.00016691306063588583,
422
- "loss": -127375.9625,
423
- "step": 340
424
- },
425
- {
426
- "epoch": 0.02,
427
- "learning_rate": 0.00016560590289905073,
428
- "loss": -126334.775,
429
- "step": 345
430
- },
431
- {
432
- "epoch": 0.02,
433
- "learning_rate": 0.00016427876096865394,
434
- "loss": -127501.675,
435
- "step": 350
436
- },
437
- {
438
- "epoch": 0.02,
439
- "learning_rate": 0.00016293203910498376,
440
- "loss": -128556.2,
441
- "step": 355
442
- },
443
- {
444
- "epoch": 0.02,
445
- "learning_rate": 0.0001615661475325658,
446
- "loss": -129236.6,
447
- "step": 360
448
- },
449
- {
450
- "epoch": 0.02,
451
- "learning_rate": 0.00016018150231520486,
452
- "loss": -128883.8375,
453
- "step": 365
454
- },
455
- {
456
- "epoch": 0.02,
457
- "learning_rate": 0.00015877852522924732,
458
- "loss": -128258.1625,
459
- "step": 370
460
- },
461
- {
462
- "epoch": 0.02,
463
- "learning_rate": 0.0001573576436351046,
464
- "loss": -127891.6125,
465
- "step": 375
466
- },
467
- {
468
- "epoch": 0.02,
469
- "learning_rate": 0.0001559192903470747,
470
- "loss": -127299.3375,
471
- "step": 380
472
- },
473
- {
474
- "epoch": 0.02,
475
- "learning_rate": 0.00015446390350150273,
476
- "loss": -134468.3625,
477
- "step": 385
478
- },
479
- {
480
- "epoch": 0.02,
481
- "learning_rate": 0.0001529919264233205,
482
- "loss": -129607.4875,
483
- "step": 390
484
- },
485
- {
486
- "epoch": 0.02,
487
- "learning_rate": 0.00015150380749100545,
488
- "loss": -128010.7,
489
- "step": 395
490
- },
491
- {
492
- "epoch": 0.02,
493
- "learning_rate": 0.00015000000000000001,
494
- "loss": -129447.0625,
495
- "step": 400
496
- },
497
- {
498
- "epoch": 0.02,
499
- "learning_rate": 0.00014848096202463372,
500
- "loss": -127959.8875,
501
- "step": 405
502
- },
503
- {
504
- "epoch": 0.02,
505
- "learning_rate": 0.00014694715627858908,
506
- "loss": -129912.275,
507
- "step": 410
508
- },
509
- {
510
- "epoch": 0.02,
511
- "learning_rate": 0.00014539904997395468,
512
- "loss": -125926.1,
513
- "step": 415
514
- },
515
- {
516
- "epoch": 0.02,
517
- "learning_rate": 0.00014383711467890774,
518
- "loss": -127398.7875,
519
- "step": 420
520
- },
521
- {
522
- "epoch": 0.02,
523
- "learning_rate": 0.00014226182617406996,
524
- "loss": -129303.4,
525
- "step": 425
526
- },
527
- {
528
- "epoch": 0.02,
529
- "learning_rate": 0.00014067366430758004,
530
- "loss": -127134.8875,
531
- "step": 430
532
- },
533
- {
534
- "epoch": 0.02,
535
- "learning_rate": 0.00013907311284892736,
536
- "loss": -127413.35,
537
- "step": 435
538
- },
539
- {
540
- "epoch": 0.03,
541
- "learning_rate": 0.00013746065934159123,
542
- "loss": -128643.9125,
543
- "step": 440
544
- },
545
- {
546
- "epoch": 0.03,
547
- "learning_rate": 0.00013583679495453,
548
- "loss": -127599.2,
549
- "step": 445
550
- },
551
- {
552
- "epoch": 0.03,
553
- "learning_rate": 0.00013420201433256689,
554
- "loss": -130022.475,
555
- "step": 450
556
- },
557
- {
558
- "epoch": 0.03,
559
- "learning_rate": 0.00013255681544571568,
560
- "loss": -128485.2625,
561
- "step": 455
562
- },
563
- {
564
- "epoch": 0.03,
565
- "learning_rate": 0.00013090169943749476,
566
- "loss": -129286.0125,
567
- "step": 460
568
- },
569
- {
570
- "epoch": 0.03,
571
- "learning_rate": 0.00012923717047227368,
572
- "loss": -128943.0125,
573
- "step": 465
574
- },
575
- {
576
- "epoch": 0.03,
577
- "learning_rate": 0.0001275637355816999,
578
- "loss": -130092.2,
579
- "step": 470
580
- },
581
- {
582
- "epoch": 0.03,
583
- "learning_rate": 0.00012588190451025207,
584
- "loss": -128025.6375,
585
- "step": 475
586
- },
587
- {
588
- "epoch": 0.03,
589
- "learning_rate": 0.00012419218955996676,
590
- "loss": -127411.2875,
591
- "step": 480
592
- },
593
- {
594
- "epoch": 0.03,
595
- "learning_rate": 0.0001224951054343865,
596
- "loss": -128624.175,
597
- "step": 485
598
- },
599
- {
600
- "epoch": 0.03,
601
- "learning_rate": 0.00012079116908177593,
602
- "loss": -129188.0875,
603
- "step": 490
604
- },
605
- {
606
- "epoch": 0.03,
607
- "learning_rate": 0.00011908089953765449,
608
- "loss": -130780.65,
609
- "step": 495
610
- },
611
- {
612
- "epoch": 0.03,
613
- "learning_rate": 0.00011736481776669306,
614
- "loss": -128979.9875,
615
- "step": 500
616
- },
617
- {
618
- "epoch": 0.03,
619
- "learning_rate": 0.0001156434465040231,
620
- "loss": -128289.55,
621
- "step": 505
622
- },
623
- {
624
- "epoch": 0.03,
625
- "learning_rate": 0.00011391731009600654,
626
- "loss": -129264.7,
627
- "step": 510
628
- },
629
- {
630
- "epoch": 0.03,
631
- "learning_rate": 0.00011218693434051475,
632
- "loss": -129166.7,
633
- "step": 515
634
- },
635
- {
636
- "epoch": 0.03,
637
- "learning_rate": 0.00011045284632676536,
638
- "loss": -130517.05,
639
- "step": 520
640
- },
641
- {
642
- "epoch": 0.03,
643
- "learning_rate": 0.00010871557427476583,
644
- "loss": -129060.7125,
645
- "step": 525
646
- },
647
- {
648
- "epoch": 0.03,
649
- "learning_rate": 0.00010697564737441252,
650
- "loss": -129417.6,
651
- "step": 530
652
- },
653
- {
654
- "epoch": 0.03,
655
- "learning_rate": 0.0001052335956242944,
656
- "loss": -128037.825,
657
- "step": 535
658
- },
659
- {
660
- "epoch": 0.03,
661
- "learning_rate": 0.00010348994967025012,
662
- "loss": -128152.5375,
663
- "step": 540
664
- },
665
- {
666
- "epoch": 0.03,
667
- "learning_rate": 0.00010174524064372837,
668
- "loss": -129852.7125,
669
- "step": 545
670
- },
671
- {
672
- "epoch": 0.03,
673
- "learning_rate": 0.0001,
674
- "loss": -130428.5375,
675
- "step": 550
676
- },
677
- {
678
- "epoch": 0.03,
679
- "learning_rate": 9.825475935627165e-05,
680
- "loss": -128036.5,
681
- "step": 555
682
- },
683
- {
684
- "epoch": 0.03,
685
- "learning_rate": 9.651005032974994e-05,
686
- "loss": -130735.8875,
687
- "step": 560
688
- },
689
- {
690
- "epoch": 0.03,
691
- "learning_rate": 9.476640437570562e-05,
692
- "loss": -128337.95,
693
- "step": 565
694
- },
695
- {
696
- "epoch": 0.03,
697
- "learning_rate": 9.302435262558747e-05,
698
- "loss": -131162.225,
699
- "step": 570
700
- },
701
- {
702
- "epoch": 0.03,
703
- "learning_rate": 9.128442572523417e-05,
704
- "loss": -127932.4,
705
- "step": 575
706
- },
707
- {
708
- "epoch": 0.03,
709
- "learning_rate": 8.954715367323468e-05,
710
- "loss": -129994.8125,
711
- "step": 580
712
- },
713
- {
714
- "epoch": 0.03,
715
- "learning_rate": 8.781306565948528e-05,
716
- "loss": -132943.675,
717
- "step": 585
718
- },
719
- {
720
- "epoch": 0.03,
721
- "learning_rate": 8.608268990399349e-05,
722
- "loss": -129275.15,
723
- "step": 590
724
- },
725
- {
726
- "epoch": 0.03,
727
- "learning_rate": 8.435655349597689e-05,
728
- "loss": -134494.475,
729
- "step": 595
730
- },
731
- {
732
- "epoch": 0.03,
733
- "learning_rate": 8.263518223330697e-05,
734
- "loss": -129055.075,
735
- "step": 600
736
- },
737
- {
738
- "epoch": 0.03,
739
- "learning_rate": 8.091910046234552e-05,
740
- "loss": -130328.3625,
741
- "step": 605
742
- },
743
- {
744
- "epoch": 0.03,
745
- "learning_rate": 7.920883091822408e-05,
746
- "loss": -130358.1,
747
- "step": 610
748
- },
749
- {
750
- "epoch": 0.04,
751
- "learning_rate": 7.750489456561352e-05,
752
- "loss": -131378.525,
753
- "step": 615
754
- },
755
- {
756
- "epoch": 0.04,
757
- "learning_rate": 7.580781044003324e-05,
758
- "loss": -133240.4375,
759
- "step": 620
760
- },
761
- {
762
- "epoch": 0.04,
763
- "learning_rate": 7.411809548974792e-05,
764
- "loss": -129869.4,
765
- "step": 625
766
- },
767
- {
768
- "epoch": 0.04,
769
- "learning_rate": 7.243626441830009e-05,
770
- "loss": -129762.225,
771
- "step": 630
772
- },
773
- {
774
- "epoch": 0.04,
775
- "learning_rate": 7.076282952772633e-05,
776
- "loss": -129086.725,
777
- "step": 635
778
- },
779
- {
780
- "epoch": 0.04,
781
- "learning_rate": 6.909830056250527e-05,
782
- "loss": -132211.9,
783
- "step": 640
784
- },
785
- {
786
- "epoch": 0.04,
787
- "learning_rate": 6.744318455428436e-05,
788
- "loss": -130901.375,
789
- "step": 645
790
- },
791
- {
792
- "epoch": 0.04,
793
- "learning_rate": 6.579798566743314e-05,
794
- "loss": -128798.4,
795
- "step": 650
796
- },
797
- {
798
- "epoch": 0.04,
799
- "learning_rate": 6.416320504546997e-05,
800
- "loss": -128713.4375,
801
- "step": 655
802
- },
803
- {
804
- "epoch": 0.04,
805
- "learning_rate": 6.25393406584088e-05,
806
- "loss": -129254.1,
807
- "step": 660
808
- },
809
- {
810
- "epoch": 0.04,
811
- "learning_rate": 6.092688715107264e-05,
812
- "loss": -129648.825,
813
- "step": 665
814
- },
815
- {
816
- "epoch": 0.04,
817
- "learning_rate": 5.9326335692419995e-05,
818
- "loss": -129720.0375,
819
- "step": 670
820
- },
821
- {
822
- "epoch": 0.04,
823
- "learning_rate": 5.773817382593008e-05,
824
- "loss": -130483.5,
825
- "step": 675
826
- },
827
- {
828
- "epoch": 0.04,
829
- "learning_rate": 5.616288532109225e-05,
830
- "loss": -132085.2625,
831
- "step": 680
832
- },
833
- {
834
- "epoch": 0.04,
835
- "learning_rate": 5.4600950026045326e-05,
836
- "loss": -129280.825,
837
- "step": 685
838
- },
839
- {
840
- "epoch": 0.04,
841
- "learning_rate": 5.305284372141095e-05,
842
- "loss": -128499.675,
843
- "step": 690
844
- },
845
- {
846
- "epoch": 0.04,
847
- "learning_rate": 5.15190379753663e-05,
848
- "loss": -131128.7125,
849
- "step": 695
850
- },
851
- {
852
- "epoch": 0.04,
853
- "learning_rate": 5.000000000000002e-05,
854
- "loss": -128292.3125,
855
- "step": 700
856
- },
857
- {
858
- "epoch": 0.04,
859
- "learning_rate": 4.8496192508994576e-05,
860
- "loss": -128954.8375,
861
- "step": 705
862
- },
863
- {
864
- "epoch": 0.04,
865
- "learning_rate": 4.700807357667952e-05,
866
- "loss": -130455.6375,
867
- "step": 710
868
- },
869
- {
870
- "epoch": 0.04,
871
- "learning_rate": 4.5536096498497295e-05,
872
- "loss": -130186.5625,
873
- "step": 715
874
- },
875
- {
876
- "epoch": 0.04,
877
- "learning_rate": 4.4080709652925336e-05,
878
- "loss": -128929.5125,
879
- "step": 720
880
- },
881
- {
882
- "epoch": 0.04,
883
- "learning_rate": 4.264235636489542e-05,
884
- "loss": -129662.825,
885
- "step": 725
886
- },
887
- {
888
- "epoch": 0.04,
889
- "learning_rate": 4.12214747707527e-05,
890
- "loss": -130658.1875,
891
- "step": 730
892
- },
893
- {
894
- "epoch": 0.04,
895
- "learning_rate": 3.981849768479517e-05,
896
- "loss": -129584.05,
897
- "step": 735
898
- },
899
- {
900
- "epoch": 0.04,
901
- "learning_rate": 3.843385246743417e-05,
902
- "loss": -129051.1875,
903
- "step": 740
904
- },
905
- {
906
- "epoch": 0.04,
907
- "learning_rate": 3.7067960895016275e-05,
908
- "loss": -129906.5,
909
- "step": 745
910
- },
911
- {
912
- "epoch": 0.04,
913
- "learning_rate": 3.5721239031346066e-05,
914
- "loss": -131726.125,
915
- "step": 750
916
- },
917
- {
918
- "epoch": 0.04,
919
- "learning_rate": 3.439409710094929e-05,
920
- "loss": -130614.0125,
921
- "step": 755
922
- },
923
- {
924
- "epoch": 0.04,
925
- "learning_rate": 3.308693936411421e-05,
926
- "loss": -129400.2,
927
- "step": 760
928
- },
929
- {
930
- "epoch": 0.04,
931
- "learning_rate": 3.1800163993750166e-05,
932
- "loss": -132052.375,
933
- "step": 765
934
- },
935
- {
936
- "epoch": 0.04,
937
- "learning_rate": 3.053416295410026e-05,
938
- "loss": -136370.1,
939
- "step": 770
940
- },
941
- {
942
- "epoch": 0.04,
943
- "learning_rate": 2.9289321881345254e-05,
944
- "loss": -130621.2,
945
- "step": 775
946
- },
947
- {
948
- "epoch": 0.04,
949
- "learning_rate": 2.8066019966134904e-05,
950
- "loss": -131630.3,
951
- "step": 780
952
- },
953
- {
954
- "epoch": 0.05,
955
- "learning_rate": 2.6864629838082956e-05,
956
- "loss": -131564.9125,
957
- "step": 785
958
- },
959
- {
960
- "epoch": 0.05,
961
- "learning_rate": 2.5685517452260567e-05,
962
- "loss": -132246.5375,
963
- "step": 790
964
- },
965
- {
966
- "epoch": 0.05,
967
- "learning_rate": 2.45290419777228e-05,
968
- "loss": -129737.325,
969
- "step": 795
970
- },
971
- {
972
- "epoch": 0.05,
973
- "learning_rate": 2.339555568810221e-05,
974
- "loss": -129589.5625,
975
- "step": 800
976
- },
977
- {
978
- "epoch": 0.05,
979
- "learning_rate": 2.2285403854302912e-05,
980
- "loss": -129878.975,
981
- "step": 805
982
- },
983
- {
984
- "epoch": 0.05,
985
- "learning_rate": 2.119892463932781e-05,
986
- "loss": -131492.4375,
987
- "step": 810
988
- },
989
- {
990
- "epoch": 0.05,
991
- "learning_rate": 2.013644899527074e-05,
992
- "loss": -128856.9,
993
- "step": 815
994
- },
995
- {
996
- "epoch": 0.05,
997
- "learning_rate": 1.9098300562505266e-05,
998
- "loss": -131127.25,
999
- "step": 820
1000
- },
1001
- {
1002
- "epoch": 0.05,
1003
- "learning_rate": 1.808479557110081e-05,
1004
- "loss": -129349.7375,
1005
- "step": 825
1006
- },
1007
- {
1008
- "epoch": 0.05,
1009
- "learning_rate": 1.7096242744495837e-05,
1010
- "loss": -133686.3375,
1011
- "step": 830
1012
- },
1013
- {
1014
- "epoch": 0.05,
1015
- "learning_rate": 1.6132943205457606e-05,
1016
- "loss": -130341.85,
1017
- "step": 835
1018
- },
1019
- {
1020
- "epoch": 0.05,
1021
- "learning_rate": 1.5195190384357404e-05,
1022
- "loss": -129862.95,
1023
- "step": 840
1024
- },
1025
- {
1026
- "epoch": 0.05,
1027
- "learning_rate": 1.4283269929788779e-05,
1028
- "loss": -129905.8875,
1029
- "step": 845
1030
- },
1031
- {
1032
- "epoch": 0.05,
1033
- "learning_rate": 1.339745962155613e-05,
1034
- "loss": -133136.0,
1035
- "step": 850
1036
- },
1037
- {
1038
- "epoch": 0.05,
1039
- "learning_rate": 1.2538029286060426e-05,
1040
- "loss": -129610.375,
1041
- "step": 855
1042
- },
1043
- {
1044
- "epoch": 0.05,
1045
- "learning_rate": 1.1705240714107302e-05,
1046
- "loss": -128013.6375,
1047
- "step": 860
1048
- },
1049
- {
1050
- "epoch": 0.05,
1051
- "learning_rate": 1.0899347581163221e-05,
1052
- "loss": -128977.225,
1053
- "step": 865
1054
- },
1055
- {
1056
- "epoch": 0.05,
1057
- "learning_rate": 1.0120595370083318e-05,
1058
- "loss": -127813.3125,
1059
- "step": 870
1060
- },
1061
- {
1062
- "epoch": 0.05,
1063
- "learning_rate": 9.369221296335006e-06,
1064
- "loss": -129546.625,
1065
- "step": 875
1066
- },
1067
- {
1068
- "epoch": 0.05,
1069
- "learning_rate": 8.645454235739903e-06,
1070
- "loss": -131817.925,
1071
- "step": 880
1072
- },
1073
- {
1074
- "epoch": 0.05,
1075
- "learning_rate": 7.949514654755962e-06,
1076
- "loss": -130486.9625,
1077
- "step": 885
1078
- },
1079
- {
1080
- "epoch": 0.05,
1081
- "learning_rate": 7.281614543321269e-06,
1082
- "loss": -130403.5625,
1083
- "step": 890
1084
- },
1085
- {
1086
- "epoch": 0.05,
1087
- "learning_rate": 6.6419573502798374e-06,
1088
- "loss": -127931.8625,
1089
- "step": 895
1090
- },
1091
- {
1092
- "epoch": 0.05,
1093
- "learning_rate": 6.030737921409169e-06,
1094
- "loss": -129240.275,
1095
- "step": 900
1096
- },
1097
- {
1098
- "epoch": 0.05,
1099
- "learning_rate": 5.448142440068316e-06,
1100
- "loss": -130990.2875,
1101
- "step": 905
1102
- },
1103
- {
1104
- "epoch": 0.05,
1105
- "learning_rate": 4.8943483704846475e-06,
1106
- "loss": -132186.325,
1107
- "step": 910
1108
- },
1109
- {
1110
- "epoch": 0.05,
1111
- "learning_rate": 4.369524403696457e-06,
1112
- "loss": -131533.55,
1113
- "step": 915
1114
- },
1115
- {
1116
- "epoch": 0.05,
1117
- "learning_rate": 3.873830406168111e-06,
1118
- "loss": -133344.5,
1119
- "step": 920
1120
- },
1121
- {
1122
- "epoch": 0.05,
1123
- "learning_rate": 3.40741737109318e-06,
1124
- "loss": -130234.625,
1125
- "step": 925
1126
- },
1127
- {
1128
- "epoch": 0.05,
1129
- "learning_rate": 2.970427372400353e-06,
1130
- "loss": -130692.8375,
1131
- "step": 930
1132
- },
1133
- {
1134
- "epoch": 0.05,
1135
- "learning_rate": 2.5629935214764865e-06,
1136
- "loss": -132096.6625,
1137
- "step": 935
1138
- },
1139
- {
1140
- "epoch": 0.05,
1141
- "learning_rate": 2.1852399266194314e-06,
1142
- "loss": -132693.075,
1143
- "step": 940
1144
- },
1145
- {
1146
- "epoch": 0.05,
1147
- "learning_rate": 1.8372816552336026e-06,
1148
- "loss": -133697.4875,
1149
- "step": 945
1150
- },
1151
- {
1152
- "epoch": 0.05,
1153
- "learning_rate": 1.5192246987791981e-06,
1154
- "loss": -129152.375,
1155
- "step": 950
1156
- },
1157
- {
1158
- "epoch": 0.05,
1159
- "learning_rate": 1.231165940486234e-06,
1160
- "loss": -130557.1375,
1161
- "step": 955
1162
- },
1163
- {
1164
- "epoch": 0.06,
1165
- "learning_rate": 9.731931258429638e-07,
1166
- "loss": -129561.8625,
1167
- "step": 960
1168
- },
1169
- {
1170
- "epoch": 0.06,
1171
- "learning_rate": 7.453848358678017e-07,
1172
- "loss": -128434.575,
1173
- "step": 965
1174
- },
1175
- {
1176
- "epoch": 0.06,
1177
- "learning_rate": 5.478104631726711e-07,
1178
- "loss": -131701.65,
1179
- "step": 970
1180
- },
1181
- {
1182
- "epoch": 0.06,
1183
- "learning_rate": 3.805301908254455e-07,
1184
- "loss": -130711.15,
1185
- "step": 975
1186
- },
1187
- {
1188
- "epoch": 0.06,
1189
- "learning_rate": 2.4359497401758024e-07,
1190
- "loss": -132491.45,
1191
- "step": 980
1192
- },
1193
- {
1194
- "epoch": 0.06,
1195
- "learning_rate": 1.3704652454261668e-07,
1196
- "loss": -135885.35,
1197
- "step": 985
1198
- },
1199
- {
1200
- "epoch": 0.06,
1201
- "learning_rate": 6.09172980904238e-08,
1202
- "loss": -130926.525,
1203
- "step": 990
1204
- },
1205
- {
1206
- "epoch": 0.06,
1207
- "learning_rate": 1.5230484360873044e-08,
1208
- "loss": -130808.1,
1209
- "step": 995
1210
- },
1211
- {
1212
- "epoch": 0.06,
1213
- "learning_rate": 0.0,
1214
- "loss": -132145.975,
1215
- "step": 1000
1216
- },
1217
- {
1218
- "epoch": 0.06,
1219
- "step": 1000,
1220
- "total_flos": 7.072526939948319e+17,
1221
- "train_loss": -130335.20808300782,
1222
- "train_runtime": 5587.0346,
1223
- "train_samples_per_second": 1.432,
1224
- "train_steps_per_second": 0.179
1225
  }
1226
  ],
1227
  "logging_steps": 5,
1228
- "max_steps": 1000,
1229
  "num_input_tokens_seen": 0,
1230
  "num_train_epochs": 1,
1231
  "save_steps": 100,
1232
- "total_flos": 7.072526939948319e+17,
1233
  "train_batch_size": 2,
1234
  "trial_name": null,
1235
  "trial_params": null
 
1
  {
2
  "best_metric": null,
3
  "best_model_checkpoint": null,
4
+ "epoch": 5.7375638303976134e-05,
5
  "eval_steps": 500,
6
+ "global_step": 1,
7
  "is_hyper_param_search": false,
8
  "is_local_process_zero": true,
9
  "is_world_process_zero": true,
10
  "log_history": [
11
  {
12
  "epoch": 0.0,
13
+ "learning_rate": 0.0002,
14
+ "loss": 0.0,
15
  "step": 1
16
  },
17
  {
18
  "epoch": 0.0,
19
+ "step": 1,
20
+ "total_flos": 707252667285504.0,
21
+ "train_loss": 0.0,
22
+ "train_runtime": 7.0368,
23
+ "train_samples_per_second": 1.137,
24
+ "train_steps_per_second": 0.142
25
  }
26
  ],
27
  "logging_steps": 5,
28
+ "max_steps": 1,
29
  "num_input_tokens_seen": 0,
30
  "num_train_epochs": 1,
31
  "save_steps": 100,
32
+ "total_flos": 707252667285504.0,
33
  "train_batch_size": 2,
34
  "trial_name": null,
35
  "trial_params": null
training_args.bin CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:e9072246606f256490c0ed9a8396d3efa810970ac671473c4e0dcfb4e74a474d
+ oid sha256:d06b09dae8d8e702b0b8596236066921f2c75edf823ef45d0a34b8875669844d
  size 4792