SmartWhatt committed on
Commit
f143ece
·
verified ·
1 Parent(s): b5b9c2d

Training in progress, epoch 1

Browse files
Files changed (29) hide show
  1. README.md +85 -0
  2. config.json +60 -0
  3. generation_config.json +254 -0
  4. model.safetensors +3 -0
  5. preprocessor_config.json +15 -0
  6. runs/Jul17_20-37-43_gpt3/events.out.tfevents.1752759466.gpt3.362004.0 +3 -0
  7. runs/Jul18_09-06-41_gpt3/events.out.tfevents.1752804404.gpt3.397502.0 +3 -0
  8. runs/Jul18_09-12-14_gpt3/events.out.tfevents.1752804737.gpt3.398006.0 +3 -0
  9. runs/Jul18_09-22-19_gpt3/events.out.tfevents.1752805342.gpt3.398204.0 +3 -0
  10. runs/Jul18_11-45-54_gpt3/events.out.tfevents.1752813957.gpt3.403849.0 +3 -0
  11. runs/Jul18_11-52-52_gpt3/events.out.tfevents.1752814375.gpt3.404297.0 +3 -0
  12. runs/Jul18_11-58-50_gpt3/events.out.tfevents.1752814732.gpt3.404578.0 +3 -0
  13. runs/Jul18_12-15-22_gpt3/events.out.tfevents.1752815725.gpt3.405026.0 +3 -0
  14. runs/Jul18_14-02-27_gpt3/events.out.tfevents.1752822149.gpt3.410230.0 +3 -0
  15. runs/Jul18_14-04-03_gpt3/events.out.tfevents.1752822246.gpt3.410423.0 +3 -0
  16. runs/Jul18_14-07-44_gpt3/events.out.tfevents.1752822467.gpt3.411218.0 +3 -0
  17. runs/Jul18_14-13-27_gpt3/events.out.tfevents.1752822810.gpt3.411636.0 +3 -0
  18. runs/Jul18_14-14-34_gpt3/events.out.tfevents.1752822877.gpt3.411809.0 +3 -0
  19. runs/Jul18_14-20-08_gpt3/events.out.tfevents.1752823211.gpt3.412079.0 +3 -0
  20. runs/Jul18_14-22-28_gpt3/events.out.tfevents.1752824202.gpt3.412411.0 +3 -0
  21. runs/Jul18_14-39-29_gpt3/events.out.tfevents.1752824372.gpt3.413182.0 +3 -0
  22. runs/Jul21_09-10-16_gpt3/events.out.tfevents.1753063819.gpt3.578067.0 +3 -0
  23. runs/Jul21_10-52-45_gpt3/events.out.tfevents.1753069967.gpt3.582495.0 +3 -0
  24. runs/Jul21_12-47-20_gpt3/events.out.tfevents.1753076843.gpt3.586663.0 +3 -0
  25. runs/Jul21_14-16-27_gpt3/events.out.tfevents.1753082189.gpt3.590692.0 +3 -0
  26. runs/Jul21_15-44-15_gpt3/events.out.tfevents.1753087458.gpt3.594254.0 +3 -0
  27. runs/Jul21_17-11-03_gpt3/events.out.tfevents.1753092665.gpt3.597839.0 +3 -0
  28. runs/Jul21_17-11-38_gpt3/events.out.tfevents.1753092707.gpt3.597996.0 +3 -0
  29. training_args.bin +3 -0
README.md ADDED
@@ -0,0 +1,85 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ library_name: transformers
3
+ language:
4
+ - th
5
+ license: apache-2.0
6
+ base_model: openai/whisper-small
7
+ tags:
8
+ - generated_from_trainer
9
+ datasets:
10
+ - mozilla-foundation/common_voice_17_0
11
+ metrics:
12
+ - wer
13
+ model-index:
14
+ - name: Rangsitra Whisper
15
+ results:
16
+ - task:
17
+ name: Automatic Speech Recognition
18
+ type: automatic-speech-recognition
19
+ dataset:
20
+ name: Common Voice 17.0
21
+ type: mozilla-foundation/common_voice_17_0
22
+ config: th
23
+ split: None
24
+ args: 'config: th, split: train'
25
+ metrics:
26
+ - name: Wer
27
+ type: wer
28
+ value: 50.5215308065606
29
+ ---
30
+
31
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
32
+ should probably proofread and complete it, then remove this comment. -->
33
+
34
+ # Rangsitra Whisper
35
+
36
+ This model is a fine-tuned version of [openai/whisper-small](https://huggingface.co/openai/whisper-small) on the Common Voice 17.0 dataset.
37
+ It achieves the following results on the evaluation set:
38
+ - Cer: 16.9770
39
+ - Wer: 50.5215
40
+ - Loss: 0.4692
41
+
42
+ ## Model description
43
+
44
+ More information needed
45
+
46
+ ## Intended uses & limitations
47
+
48
+ More information needed
49
+
50
+ ## Training and evaluation data
51
+
52
+ More information needed
53
+
54
+ ## Training procedure
55
+
56
+ ### Training hyperparameters
57
+
58
+ The following hyperparameters were used during training:
59
+ - learning_rate: 0.0001
60
+ - train_batch_size: 16
61
+ - eval_batch_size: 16
62
+ - seed: 42
63
+ - optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
64
+ - lr_scheduler_type: warmup_stable_decay
65
+ - lr_scheduler_warmup_steps: 312
66
+ - num_epochs: 5
67
+ - mixed_precision_training: Native AMP
68
+
69
+ ### Training results
70
+
71
+ | Training Loss | Epoch | Step | Cer | Wer | Validation Loss |
72
+ |:-------------:|:-----:|:----:|:-------:|:-------:|:---------------:|
73
+ | 0.5794 | 1.0 | 625 | 29.7974 | 76.7516 | 0.6178 |
74
+ | 0.4341 | 2.0 | 1250 | 25.9463 | 72.2551 | 0.5594 |
75
+ | 0.2522 | 3.0 | 1875 | 24.1075 | 66.3984 | 0.5357 |
76
+ | 0.1007 | 4.0 | 2500 | 20.0265 | 57.3414 | 0.4880 |
77
+ | 0.0203 | 5.0 | 3125 | 16.9770 | 50.5215 | 0.4692 |
78
+
79
+
80
+ ### Framework versions
81
+
82
+ - Transformers 4.52.4
83
+ - Pytorch 2.7.1+cu126
84
+ - Datasets 3.6.0
85
+ - Tokenizers 0.21.2
config.json ADDED
@@ -0,0 +1,60 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation_dropout": 0.0,
3
+ "activation_function": "gelu",
4
+ "apply_spec_augment": false,
5
+ "architectures": [
6
+ "WhisperForConditionalGeneration"
7
+ ],
8
+ "attention_dropout": 0.0,
9
+ "begin_suppress_tokens": null,
10
+ "bos_token_id": 50257,
11
+ "classifier_proj_size": 256,
12
+ "d_model": 768,
13
+ "decoder_attention_heads": 12,
14
+ "decoder_ffn_dim": 3072,
15
+ "decoder_layerdrop": 0.0,
16
+ "decoder_layers": 12,
17
+ "decoder_start_token_id": 50258,
18
+ "dropout": 0.0,
19
+ "encoder_attention_heads": 12,
20
+ "encoder_ffn_dim": 3072,
21
+ "encoder_layerdrop": 0.0,
22
+ "encoder_layers": 12,
23
+ "eos_token_id": 50257,
24
+ "forced_decoder_ids": [
25
+ [
26
+ 1,
27
+ 50259
28
+ ],
29
+ [
30
+ 2,
31
+ 50359
32
+ ],
33
+ [
34
+ 3,
35
+ 50363
36
+ ]
37
+ ],
38
+ "init_std": 0.02,
39
+ "is_encoder_decoder": true,
40
+ "mask_feature_length": 10,
41
+ "mask_feature_min_masks": 0,
42
+ "mask_feature_prob": 0.0,
43
+ "mask_time_length": 10,
44
+ "mask_time_min_masks": 2,
45
+ "mask_time_prob": 0.05,
46
+ "max_length": null,
47
+ "max_source_positions": 1500,
48
+ "max_target_positions": 448,
49
+ "median_filter_width": 7,
50
+ "model_type": "whisper",
51
+ "num_hidden_layers": 12,
52
+ "num_mel_bins": 80,
53
+ "pad_token_id": 50257,
54
+ "scale_embedding": false,
55
+ "torch_dtype": "float32",
56
+ "transformers_version": "4.52.4",
57
+ "use_cache": false,
58
+ "use_weighted_layer_sum": false,
59
+ "vocab_size": 51866
60
+ }
generation_config.json ADDED
@@ -0,0 +1,254 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "alignment_heads": [
3
+ [
4
+ 5,
5
+ 3
6
+ ],
7
+ [
8
+ 5,
9
+ 9
10
+ ],
11
+ [
12
+ 8,
13
+ 0
14
+ ],
15
+ [
16
+ 8,
17
+ 4
18
+ ],
19
+ [
20
+ 8,
21
+ 7
22
+ ],
23
+ [
24
+ 8,
25
+ 8
26
+ ],
27
+ [
28
+ 9,
29
+ 0
30
+ ],
31
+ [
32
+ 9,
33
+ 7
34
+ ],
35
+ [
36
+ 9,
37
+ 9
38
+ ],
39
+ [
40
+ 10,
41
+ 5
42
+ ]
43
+ ],
44
+ "begin_suppress_tokens": [
45
+ 220,
46
+ 50257
47
+ ],
48
+ "bos_token_id": 50257,
49
+ "decoder_start_token_id": 50258,
50
+ "eos_token_id": 50257,
51
+ "is_multilingual": true,
52
+ "lang_to_id": {
53
+ "<|af|>": 50327,
54
+ "<|am|>": 50334,
55
+ "<|ar|>": 50272,
56
+ "<|as|>": 50350,
57
+ "<|az|>": 50304,
58
+ "<|ba|>": 50355,
59
+ "<|be|>": 50330,
60
+ "<|bg|>": 50292,
61
+ "<|bn|>": 50302,
62
+ "<|bo|>": 50347,
63
+ "<|br|>": 50309,
64
+ "<|bs|>": 50315,
65
+ "<|ca|>": 50270,
66
+ "<|cs|>": 50283,
67
+ "<|cy|>": 50297,
68
+ "<|da|>": 50285,
69
+ "<|de|>": 50261,
70
+ "<|el|>": 50281,
71
+ "<|en|>": 50259,
72
+ "<|es|>": 50262,
73
+ "<|et|>": 50307,
74
+ "<|eu|>": 50310,
75
+ "<|fa|>": 50300,
76
+ "<|fi|>": 50277,
77
+ "<|fo|>": 50338,
78
+ "<|fr|>": 50265,
79
+ "<|gl|>": 50319,
80
+ "<|gu|>": 50333,
81
+ "<|haw|>": 50352,
82
+ "<|ha|>": 50354,
83
+ "<|he|>": 50279,
84
+ "<|hi|>": 50276,
85
+ "<|hr|>": 50291,
86
+ "<|ht|>": 50339,
87
+ "<|hu|>": 50286,
88
+ "<|hy|>": 50312,
89
+ "<|id|>": 50275,
90
+ "<|is|>": 50311,
91
+ "<|it|>": 50274,
92
+ "<|ja|>": 50266,
93
+ "<|jw|>": 50356,
94
+ "<|ka|>": 50329,
95
+ "<|kk|>": 50316,
96
+ "<|km|>": 50323,
97
+ "<|kn|>": 50306,
98
+ "<|ko|>": 50264,
99
+ "<|la|>": 50294,
100
+ "<|lb|>": 50345,
101
+ "<|ln|>": 50353,
102
+ "<|lo|>": 50336,
103
+ "<|lt|>": 50293,
104
+ "<|lv|>": 50301,
105
+ "<|mg|>": 50349,
106
+ "<|mi|>": 50295,
107
+ "<|mk|>": 50308,
108
+ "<|ml|>": 50296,
109
+ "<|mn|>": 50314,
110
+ "<|mr|>": 50320,
111
+ "<|ms|>": 50282,
112
+ "<|mt|>": 50343,
113
+ "<|my|>": 50346,
114
+ "<|ne|>": 50313,
115
+ "<|nl|>": 50271,
116
+ "<|nn|>": 50342,
117
+ "<|no|>": 50288,
118
+ "<|oc|>": 50328,
119
+ "<|pa|>": 50321,
120
+ "<|pl|>": 50269,
121
+ "<|ps|>": 50340,
122
+ "<|pt|>": 50267,
123
+ "<|ro|>": 50284,
124
+ "<|ru|>": 50263,
125
+ "<|sa|>": 50344,
126
+ "<|sd|>": 50332,
127
+ "<|si|>": 50322,
128
+ "<|sk|>": 50298,
129
+ "<|sl|>": 50305,
130
+ "<|sn|>": 50324,
131
+ "<|so|>": 50326,
132
+ "<|sq|>": 50317,
133
+ "<|sr|>": 50303,
134
+ "<|su|>": 50357,
135
+ "<|sv|>": 50273,
136
+ "<|sw|>": 50318,
137
+ "<|ta|>": 50287,
138
+ "<|te|>": 50299,
139
+ "<|tg|>": 50331,
140
+ "<|th|>": 50289,
141
+ "<|tk|>": 50341,
142
+ "<|tl|>": 50348,
143
+ "<|tr|>": 50268,
144
+ "<|tt|>": 50351,
145
+ "<|uk|>": 50280,
146
+ "<|ur|>": 50290,
147
+ "<|uz|>": 50337,
148
+ "<|vi|>": 50278,
149
+ "<|yi|>": 50335,
150
+ "<|yo|>": 50325,
151
+ "<|zh|>": 50260
152
+ },
153
+ "language": "Thai",
154
+ "max_initial_timestamp_index": 50,
155
+ "max_length": 448,
156
+ "no_timestamps_token_id": 50363,
157
+ "pad_token_id": 50257,
158
+ "prev_sot_token_id": 50361,
159
+ "return_timestamps": false,
160
+ "suppress_tokens": [
161
+ 1,
162
+ 2,
163
+ 7,
164
+ 8,
165
+ 9,
166
+ 10,
167
+ 14,
168
+ 25,
169
+ 26,
170
+ 27,
171
+ 28,
172
+ 29,
173
+ 31,
174
+ 58,
175
+ 59,
176
+ 60,
177
+ 61,
178
+ 62,
179
+ 63,
180
+ 90,
181
+ 91,
182
+ 92,
183
+ 93,
184
+ 359,
185
+ 503,
186
+ 522,
187
+ 542,
188
+ 873,
189
+ 893,
190
+ 902,
191
+ 918,
192
+ 922,
193
+ 931,
194
+ 1350,
195
+ 1853,
196
+ 1982,
197
+ 2460,
198
+ 2627,
199
+ 3246,
200
+ 3253,
201
+ 3268,
202
+ 3536,
203
+ 3846,
204
+ 3961,
205
+ 4183,
206
+ 4667,
207
+ 6585,
208
+ 6647,
209
+ 7273,
210
+ 9061,
211
+ 9383,
212
+ 10428,
213
+ 10929,
214
+ 11938,
215
+ 12033,
216
+ 12331,
217
+ 12562,
218
+ 13793,
219
+ 14157,
220
+ 14635,
221
+ 15265,
222
+ 15618,
223
+ 16553,
224
+ 16604,
225
+ 18362,
226
+ 18956,
227
+ 20075,
228
+ 21675,
229
+ 22520,
230
+ 26130,
231
+ 26161,
232
+ 26435,
233
+ 28279,
234
+ 29464,
235
+ 31650,
236
+ 32302,
237
+ 32470,
238
+ 36865,
239
+ 42863,
240
+ 47425,
241
+ 49870,
242
+ 50254,
243
+ 50258,
244
+ 50360,
245
+ 50361,
246
+ 50362
247
+ ],
248
+ "task": "transcribe",
249
+ "task_to_id": {
250
+ "transcribe": 50359,
251
+ "translate": 50358
252
+ },
253
+ "transformers_version": "4.52.4"
254
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9693f74e1cfde8a18a75fa21739ff4058359a41b1d7126d2e47095984aaf8376
3
+ size 966998152
preprocessor_config.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "chunk_length": 30,
3
+ "dither": 0.0,
4
+ "feature_extractor_type": "WhisperFeatureExtractor",
5
+ "feature_size": 80,
6
+ "hop_length": 160,
7
+ "n_fft": 400,
8
+ "n_samples": 480000,
9
+ "nb_max_frames": 3000,
10
+ "padding_side": "right",
11
+ "padding_value": 0.0,
12
+ "processor_class": "WhisperProcessor",
13
+ "return_attention_mask": false,
14
+ "sampling_rate": 16000
15
+ }
runs/Jul17_20-37-43_gpt3/events.out.tfevents.1752759466.gpt3.362004.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c053a4e0e10d16d545dccaff3c500aeef4bc03abfa89b32ecf8ef812a6fd65f5
3
+ size 35165
runs/Jul18_09-06-41_gpt3/events.out.tfevents.1752804404.gpt3.397502.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2cbe7ae3d9100f2e7cc6ad02c0494576dcc2bf17a15c022dc0e14e2932dda0f4
3
+ size 11988
runs/Jul18_09-12-14_gpt3/events.out.tfevents.1752804737.gpt3.398006.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:df367afa3ab29c6927103af3c70530938d1dcf05b1af99f04c91251d979e5d8a
3
+ size 6733
runs/Jul18_09-22-19_gpt3/events.out.tfevents.1752805342.gpt3.398204.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2d989fa5a7263f916f112f55af76558849527451f1811b65056157a82803b786
3
+ size 35267
runs/Jul18_11-45-54_gpt3/events.out.tfevents.1752813957.gpt3.403849.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0cbecd112669b903539ed2b6482c1446d94bbe78652ce495d7cd178ed98204af
3
+ size 11988
runs/Jul18_11-52-52_gpt3/events.out.tfevents.1752814375.gpt3.404297.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0968360afb0fa97103a0ac819e10ba18f0e81da466da0669b2e9cd25102ae1b7
3
+ size 11988
runs/Jul18_11-58-50_gpt3/events.out.tfevents.1752814732.gpt3.404578.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:26740767ffa4b72bf8e8a833378117afa06c0b159535116b9fa444ce26ca3b97
3
+ size 11988
runs/Jul18_12-15-22_gpt3/events.out.tfevents.1752815725.gpt3.405026.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:70d53b4bb8a53df2398430c63fbefcad939c9caef80b3147eac0270cac5fc318
3
+ size 35267
runs/Jul18_14-02-27_gpt3/events.out.tfevents.1752822149.gpt3.410230.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b51f5ce61310d88e36f0b9888d22daa8049786406cefa040d6b4be579b6a7b05
3
+ size 6733
runs/Jul18_14-04-03_gpt3/events.out.tfevents.1752822246.gpt3.410423.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3bf82187f89ff10283a977f65e6bbb8d418ce0c752ce0513a3981472933f476d
3
+ size 6733
runs/Jul18_14-07-44_gpt3/events.out.tfevents.1752822467.gpt3.411218.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:887b94d43c06c4e8bf2e9f86509ccc8e335294d19d25f657480725ee84105e05
3
+ size 11988
runs/Jul18_14-13-27_gpt3/events.out.tfevents.1752822810.gpt3.411636.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:83b6e7a877830430c66bfc4f7e9bdf11aa5cae5648e97aeaf819cb9add9acbd7
3
+ size 6940
runs/Jul18_14-14-34_gpt3/events.out.tfevents.1752822877.gpt3.411809.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a3022f26f6b983edb0d203692c1024cf2ae43cb19d49229734629f1a0aaa5815
3
+ size 11988
runs/Jul18_14-20-08_gpt3/events.out.tfevents.1752823211.gpt3.412079.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:817dc085990f2c49ce9cc3f53ddbe19bb4e3ca5172439d415d3586f505ff55fa
3
+ size 8190
runs/Jul18_14-22-28_gpt3/events.out.tfevents.1752824202.gpt3.412411.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d2b8e3c89a2475a521f823ae583616cfc70e368ea03646e0dafee86bcaeeda43
3
+ size 448
runs/Jul18_14-39-29_gpt3/events.out.tfevents.1752824372.gpt3.413182.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5c45ac4975cbfc00298fe7d8d82ffd6a9905f775dffb250f8fafc25e99bb9396
3
+ size 35267
runs/Jul21_09-10-16_gpt3/events.out.tfevents.1753063819.gpt3.578067.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf1aeb07679c787f4bb5d31d1ed2e9093e027cad23131109271dd7c45a5d850b
3
+ size 35267
runs/Jul21_10-52-45_gpt3/events.out.tfevents.1753069967.gpt3.582495.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:57596703569427d6d74ac320e860f88b2f5dda04b5ace7ac4b06d217d16ad1c5
3
+ size 35311
runs/Jul21_12-47-20_gpt3/events.out.tfevents.1753076843.gpt3.586663.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ee7a893cd17a9e3e289720566c5f217074f54ecd24451371d1b5959e22a4a153
3
+ size 35311
runs/Jul21_14-16-27_gpt3/events.out.tfevents.1753082189.gpt3.590692.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:705162c3ca0aa0a62351e8166a5a814c2bed18c905b9d648cde58bf454d15ef6
3
+ size 35311
runs/Jul21_15-44-15_gpt3/events.out.tfevents.1753087458.gpt3.594254.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e14996c96a00b27c29b4a4d5cd3e34e1a92c30619a4c7facb752b4ad7db1f088
3
+ size 35312
runs/Jul21_17-11-03_gpt3/events.out.tfevents.1753092665.gpt3.597839.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:24e45bda27a20c849a6ec05c945219074f3c1f02f0c73376885f489a2f00d45e
3
+ size 6777
runs/Jul21_17-11-38_gpt3/events.out.tfevents.1753092707.gpt3.597996.0 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1c514f877e94e8c18bdd79349ae0bd9e9499669d2982fd6ffbaf08d302e1f119
3
+ size 12397
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2fe1e26e4e94c337165f38b07120ebf5fea71cb40976b717cc8c864c59c3c10d
3
+ size 5969