jonasknobloch committed on
Commit ebd99fd · verified
1 Parent(s): 16b9ff3

Upload folder using huggingface_hub

README.md ADDED
@@ -0,0 +1,89 @@
1
+ ---
2
+ tags:
3
+ - generated_from_trainer
4
+ datasets:
5
+ - roneneldan/TinyStories
6
+ metrics:
7
+ - accuracy
8
+ model-index:
9
+ - name: gpt2_u030_tiny-stories_1024_dpos
10
+ results:
11
+ - task:
12
+ name: Causal Language Modeling
13
+ type: text-generation
14
+ dataset:
15
+ name: roneneldan/TinyStories
16
+ type: roneneldan/TinyStories
17
+ metrics:
18
+ - name: Accuracy
19
+ type: accuracy
20
+ value: 0.6788553562327562
21
+ ---
22
+
23
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
24
+ should probably proofread and complete it, then remove this comment. -->
25
+
26
+ [<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="200" height="32"/>](https://wandb.ai/scads-nlp/morph-gpt_gpt2_tiny-stories_dpos/runs/s3ht4a0t)
27
+ # gpt2_u030_tiny-stories_1024_dpos
28
+
29
+ This model is a GPT-2-style causal language model trained on the roneneldan/TinyStories dataset.
30
+ It achieves the following results on the evaluation set:
31
+ - Loss: 1.2107
32
+ - Accuracy: 0.6789
33
+
34
+ ## Model description
35
+
36
+ More information needed
37
+
38
+ ## Intended uses & limitations
39
+
40
+ More information needed
41
+
42
+ ## Training and evaluation data
43
+
44
+ More information needed
45
+
46
+ ## Training procedure
47
+
48
+ ### Training hyperparameters
49
+
50
+ The following hyperparameters were used during training:
51
+ - learning_rate: 5e-05
52
+ - train_batch_size: 32
53
+ - eval_batch_size: 32
54
+ - seed: 42
55
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
56
+ - lr_scheduler_type: linear
57
+ - num_epochs: 1.0
58
+
59
+ ### Training results
60
+
61
+ | Training Loss | Epoch | Step | Validation Loss | Accuracy |
62
+ |:-------------:|:------:|:-----:|:---------------:|:--------:|
63
+ | 2.92 | 0.0523 | 1000 | 2.4566 | 0.4470 |
64
+ | 1.9818 | 0.1046 | 2000 | 1.8089 | 0.5672 |
65
+ | 1.7321 | 0.1570 | 3000 | 1.6133 | 0.6021 |
66
+ | 1.6137 | 0.2093 | 4000 | 1.5171 | 0.6195 |
67
+ | 1.5353 | 0.2616 | 5000 | 1.4516 | 0.6312 |
68
+ | 1.4845 | 0.3139 | 6000 | 1.4056 | 0.6400 |
69
+ | 1.4443 | 0.3662 | 7000 | 1.3718 | 0.6466 |
70
+ | 1.4118 | 0.4186 | 8000 | 1.3420 | 0.6525 |
71
+ | 1.3878 | 0.4709 | 9000 | 1.3189 | 0.6569 |
72
+ | 1.3661 | 0.5232 | 10000 | 1.2988 | 0.6608 |
73
+ | 1.3485 | 0.5755 | 11000 | 1.2834 | 0.6639 |
74
+ | 1.3326 | 0.6278 | 12000 | 1.2675 | 0.6669 |
75
+ | 1.319 | 0.6802 | 13000 | 1.2555 | 0.6694 |
76
+ | 1.3068 | 0.7325 | 14000 | 1.2440 | 0.6719 |
77
+ | 1.2932 | 0.7848 | 15000 | 1.2350 | 0.6737 |
78
+ | 1.2868 | 0.8371 | 16000 | 1.2263 | 0.6755 |
79
+ | 1.2791 | 0.8894 | 17000 | 1.2193 | 0.6771 |
80
+ | 1.2725 | 0.9418 | 18000 | 1.2141 | 0.6780 |
81
+ | 1.2711 | 0.9941 | 19000 | 1.2108 | 0.6788 |
82
+
83
+
84
+ ### Framework versions
85
+
86
+ - Transformers 4.42.3
87
+ - Pytorch 2.2.2+cu121
88
+ - Datasets 2.20.0
89
+ - Tokenizers 0.19.1
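
For context on how this checkpoint is meant to be used, a minimal loading-and-generation sketch with `transformers`; the repo id is assumed from the model name above and may differ from the actual namespace:

```python
# Minimal usage sketch; the repo id is assumed from the model name and may need adjusting.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "jonasknobloch/gpt2_u030_tiny-stories_1024_dpos"  # assumed

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModelForCausalLM.from_pretrained(repo_id)

# TinyStories-style prompt; the model was trained for causal language modeling.
inputs = tokenizer("One day, a little girl named Lily", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=64, do_sample=True, top_p=0.95)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```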
all_results.json ADDED
@@ -0,0 +1,16 @@
1
+ {
2
+ "epoch": 1.0,
3
+ "eval_accuracy": 0.6788553562327562,
4
+ "eval_loss": 1.2106505632400513,
5
+ "eval_runtime": 53.3968,
6
+ "eval_samples": 6151,
7
+ "eval_samples_per_second": 115.194,
8
+ "eval_steps_per_second": 3.614,
9
+ "perplexity": 3.35566701619586,
10
+ "total_flos": 3.19615549046784e+17,
11
+ "train_loss": 1.5581016430754755,
12
+ "train_runtime": 7493.9423,
13
+ "train_samples": 611606,
14
+ "train_samples_per_second": 81.613,
15
+ "train_steps_per_second": 2.55
16
+ }
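
The `perplexity` field is just the exponential of `eval_loss`; a quick check of the arithmetic:

```python
import math

eval_loss = 1.2106505632400513
print(math.exp(eval_loss))  # ~3.3557, matching the reported perplexity of 3.35566701619586
```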
config.json ADDED
@@ -0,0 +1,31 @@
1
+ {
2
+ "activation_function": "gelu_new",
3
+ "architectures": [
4
+ "GPT2LMHeadModel"
5
+ ],
6
+ "attn_pdrop": 0.1,
7
+ "bos_token_id": 1024,
8
+ "embd_pdrop": 0.1,
9
+ "eos_token_id": 1024,
10
+ "initializer_range": 0.02,
11
+ "layer_norm_epsilon": 1e-05,
12
+ "model_type": "gpt2",
13
+ "n_embd": 768,
14
+ "n_head": 12,
15
+ "n_inner": null,
16
+ "n_layer": 12,
17
+ "n_positions": 2048,
18
+ "reorder_and_upcast_attn": false,
19
+ "resid_pdrop": 0.1,
20
+ "scale_attn_by_inverse_layer_idx": false,
21
+ "scale_attn_weights": true,
22
+ "summary_activation": null,
23
+ "summary_first_dropout": 0.1,
24
+ "summary_proj_to_labels": true,
25
+ "summary_type": "cls_index",
26
+ "summary_use_proj": true,
27
+ "torch_dtype": "float32",
28
+ "transformers_version": "4.42.3",
29
+ "use_cache": true,
30
+ "vocab_size": 50257
31
+ }
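
For reference, a sketch of instantiating the same architecture from these settings with the standard `GPT2Config`; fields not listed fall back to library defaults:

```python
from transformers import GPT2Config, GPT2LMHeadModel

# Mirrors the key fields of config.json above.
config = GPT2Config(
    vocab_size=50257,
    n_positions=2048,
    n_embd=768,
    n_layer=12,
    n_head=12,
    bos_token_id=1024,
    eos_token_id=1024,
)
model = GPT2LMHeadModel(config)

# Roughly 125M float32 parameters for this shape, which lines up with the
# ~500 MB model.safetensors file below (4 bytes per parameter).
print(sum(p.numel() for p in model.parameters()))
```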
eval_results.json ADDED
@@ -0,0 +1,10 @@
1
+ {
2
+ "epoch": 1.0,
3
+ "eval_accuracy": 0.6788553562327562,
4
+ "eval_loss": 1.2106505632400513,
5
+ "eval_runtime": 53.3968,
6
+ "eval_samples": 6151,
7
+ "eval_samples_per_second": 115.194,
8
+ "eval_steps_per_second": 3.614,
9
+ "perplexity": 3.35566701619586
10
+ }
generation_config.json ADDED
@@ -0,0 +1,6 @@
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1024,
4
+ "eos_token_id": 1024,
5
+ "transformers_version": "4.42.3"
6
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3dc79872bad471e18a190a91f687e947f31102ddfa2472882038e0a482af24cb
3
+ size 500919936
special_tokens_map.json ADDED
@@ -0,0 +1 @@
1
+ {}
tokenizer.json ADDED
@@ -0,0 +1,1919 @@
1
+ {
2
+ "version": "1.0",
3
+ "truncation": null,
4
+ "padding": null,
5
+ "added_tokens": [
6
+ {
7
+ "id": 1024,
8
+ "content": "<|endoftext|>",
9
+ "single_word": false,
10
+ "lstrip": false,
11
+ "rstrip": false,
12
+ "normalized": true,
13
+ "special": true
14
+ }
15
+ ],
16
+ "normalizer": null,
17
+ "pre_tokenizer": {
18
+ "type": "ByteLevel",
19
+ "add_prefix_space": false,
20
+ "trim_offsets": true,
21
+ "use_regex": true
22
+ },
23
+ "post_processor": {
24
+ "type": "ByteLevel",
25
+ "add_prefix_space": true,
26
+ "trim_offsets": false,
27
+ "use_regex": true
28
+ },
29
+ "decoder": {
30
+ "type": "ByteLevel",
31
+ "add_prefix_space": true,
32
+ "trim_offsets": true,
33
+ "use_regex": true
34
+ },
35
+ "model": {
36
+ "type": "BPE",
37
+ "dropout": null,
38
+ "unk_token": null,
39
+ "continuing_subword_prefix": null,
40
+ "end_of_word_suffix": null,
41
+ "fuse_unk": false,
42
+ "byte_fallback": false,
43
+ "ignore_merges": false,
44
+ "vocab": {
45
+ "!": 0,
46
+ "\"": 1,
47
+ "#": 2,
48
+ "$": 3,
49
+ "%": 4,
50
+ "&": 5,
51
+ "'": 6,
52
+ "(": 7,
53
+ ")": 8,
54
+ "*": 9,
55
+ "+": 10,
56
+ ",": 11,
57
+ "-": 12,
58
+ ".": 13,
59
+ "/": 14,
60
+ "0": 15,
61
+ "1": 16,
62
+ "2": 17,
63
+ "3": 18,
64
+ "4": 19,
65
+ "5": 20,
66
+ "6": 21,
67
+ "7": 22,
68
+ "8": 23,
69
+ "9": 24,
70
+ ":": 25,
71
+ ";": 26,
72
+ "<": 27,
73
+ "=": 28,
74
+ ">": 29,
75
+ "?": 30,
76
+ "@": 31,
77
+ "A": 32,
78
+ "B": 33,
79
+ "C": 34,
80
+ "D": 35,
81
+ "E": 36,
82
+ "F": 37,
83
+ "G": 38,
84
+ "H": 39,
85
+ "I": 40,
86
+ "J": 41,
87
+ "K": 42,
88
+ "L": 43,
89
+ "M": 44,
90
+ "N": 45,
91
+ "O": 46,
92
+ "P": 47,
93
+ "Q": 48,
94
+ "R": 49,
95
+ "S": 50,
96
+ "T": 51,
97
+ "U": 52,
98
+ "V": 53,
99
+ "W": 54,
100
+ "X": 55,
101
+ "Y": 56,
102
+ "Z": 57,
103
+ "[": 58,
104
+ "\\": 59,
105
+ "]": 60,
106
+ "_": 61,
107
+ "`": 62,
108
+ "a": 63,
109
+ "b": 64,
110
+ "c": 65,
111
+ "d": 66,
112
+ "e": 67,
113
+ "f": 68,
114
+ "g": 69,
115
+ "h": 70,
116
+ "i": 71,
117
+ "j": 72,
118
+ "k": 73,
119
+ "l": 74,
120
+ "m": 75,
121
+ "n": 76,
122
+ "o": 77,
123
+ "p": 78,
124
+ "q": 79,
125
+ "r": 80,
126
+ "s": 81,
127
+ "t": 82,
128
+ "u": 83,
129
+ "v": 84,
130
+ "w": 85,
131
+ "x": 86,
132
+ "y": 87,
133
+ "z": 88,
134
+ "{": 89,
135
+ "|": 90,
136
+ "}": 91,
137
+ "~": 92,
138
+ "¡": 93,
139
+ "¢": 94,
140
+ "£": 95,
141
+ "¤": 96,
142
+ "¥": 97,
143
+ "¦": 98,
144
+ "§": 99,
145
+ "¨": 100,
146
+ "©": 101,
147
+ "ª": 102,
148
+ "«": 103,
149
+ "¬": 104,
150
+ "®": 105,
151
+ "¯": 106,
152
+ "°": 107,
153
+ "±": 108,
154
+ "³": 109,
155
+ "´": 110,
156
+ "µ": 111,
157
+ "¶": 112,
158
+ "·": 113,
159
+ "¸": 114,
160
+ "¹": 115,
161
+ "º": 116,
162
+ "»": 117,
163
+ "¼": 118,
164
+ "½": 119,
165
+ "¾": 120,
166
+ "¿": 121,
167
+ "Â": 122,
168
+ "Ã": 123,
169
+ "Ä": 124,
170
+ "Å": 125,
171
+ "É": 126,
172
+ "Ê": 127,
173
+ "Ñ": 128,
174
+ "Ò": 129,
175
+ "á": 130,
176
+ "â": 131,
177
+ "ã": 132,
178
+ "ä": 133,
179
+ "å": 134,
180
+ "æ": 135,
181
+ "ç": 136,
182
+ "è": 137,
183
+ "é": 138,
184
+ "î": 139,
185
+ "ï": 140,
186
+ "ð": 141,
187
+ "ĉ": 142,
188
+ "Ċ": 143,
189
+ "Ġ": 144,
190
+ "Ģ": 145,
191
+ "ģ": 146,
192
+ "Ĥ": 147,
193
+ "ĥ": 148,
194
+ "Ħ": 149,
195
+ "ħ": 150,
196
+ "Ĩ": 151,
197
+ "ĩ": 152,
198
+ "Ī": 153,
199
+ "ī": 154,
200
+ "Ĭ": 155,
201
+ "ĭ": 156,
202
+ "Į": 157,
203
+ "į": 158,
204
+ "İ": 159,
205
+ "ı": 160,
206
+ "IJ": 161,
207
+ "ij": 162,
208
+ "Ĵ": 163,
209
+ "ĵ": 164,
210
+ "Ķ": 165,
211
+ "ķ": 166,
212
+ "ĸ": 167,
213
+ "ĺ": 168,
214
+ "Ļ": 169,
215
+ "ļ": 170,
216
+ "Ľ": 171,
217
+ "ľ": 172,
218
+ "Ŀ": 173,
219
+ "ŀ": 174,
220
+ "Ł": 175,
221
+ "ł": 176,
222
+ "Ń": 177,
223
+ "Ġt": 178,
224
+ "Ġa": 179,
225
+ "he": 180,
226
+ "Ġs": 181,
227
+ "nd": 182,
228
+ "Ġw": 183,
229
+ "Ġthe": 184,
230
+ "ed": 185,
231
+ "Ġb": 186,
232
+ "ĠT": 187,
233
+ "Ġto": 188,
234
+ "Ġand": 189,
235
+ "Ġh": 190,
236
+ "Ġf": 191,
237
+ "in": 192,
238
+ "Ġwa": 193,
239
+ "re": 194,
240
+ "it": 195,
241
+ "ou": 196,
242
+ "Ġl": 197,
243
+ "Ġd": 198,
244
+ "Ġc": 199,
245
+ "Ġp": 200,
246
+ "ay": 201,
247
+ "Ġm": 202,
248
+ "er": 203,
249
+ "Ġwas": 204,
250
+ "om": 205,
251
+ "Ġhe": 206,
252
+ "is": 207,
253
+ "ĠThe": 208,
254
+ "im": 209,
255
+ "Ġn": 210,
256
+ "on": 211,
257
+ "ĠS": 212,
258
+ "ar": 213,
259
+ "Ġsa": 214,
260
+ "ll": 215,
261
+ "id": 216,
262
+ "Ġha": 217,
263
+ "at": 218,
264
+ "Ġg": 219,
265
+ "ing": 220,
266
+ "en": 221,
267
+ "ot": 222,
268
+ "an": 223,
269
+ "le": 224,
270
+ "end": 225,
271
+ "or": 226,
272
+ "Ġ\"": 227,
273
+ "of": 228,
274
+ "ĠH": 229,
275
+ "ir": 230,
276
+ "am": 231,
277
+ "et": 232,
278
+ "Ġit": 233,
279
+ "Ġth": 234,
280
+ "ig": 235,
281
+ "ĠHe": 236,
282
+ "Ġin": 237,
283
+ "Ġpl": 238,
284
+ "ĠO": 239,
285
+ "ly": 240,
286
+ "ri": 241,
287
+ "ver": 242,
288
+ "ut": 243,
289
+ "ow": 244,
290
+ "Ġbe": 245,
291
+ "Ġu": 246,
292
+ "Ġsaid": 247,
293
+ "Ġplay": 248,
294
+ "ith": 249,
295
+ "Ġwith": 250,
296
+ "Ġday": 251,
297
+ "Ġy": 252,
298
+ "ked": 253,
299
+ "pp": 254,
300
+ "ex": 255,
301
+ "Ġr": 256,
302
+ "ce": 257,
303
+ "ld": 258,
304
+ "oo": 259,
305
+ "ĠI": 260,
306
+ "ĠThey": 261,
307
+ "Ġher": 262,
308
+ "ĠL": 263,
309
+ "Ġhis": 264,
310
+ "Ġst": 265,
311
+ "es": 266,
312
+ "ĠOn": 267,
313
+ "ĠB": 268,
314
+ "ck": 269,
315
+ "Ġbig": 270,
316
+ "nt": 271,
317
+ "Ġyou": 272,
318
+ "ke": 273,
319
+ "ext": 274,
320
+ "very": 275,
321
+ "oft": 276,
322
+ "ĠM": 277,
323
+ "Ġon": 278,
324
+ "un": 279,
325
+ "Ġhapp": 280,
326
+ "st": 281,
327
+ "ve": 282,
328
+ "Ġfri": 283,
329
+ "Ġfriend": 284,
330
+ "ĠTim": 285,
331
+ "Ġli": 286,
332
+ "ĠLi": 287,
333
+ "Ġthey": 288,
334
+ "all": 289,
335
+ "Ġwe": 290,
336
+ "Ġhad": 291,
337
+ "Ġnot": 292,
338
+ "Ġup": 293,
339
+ "Ġwant": 294,
340
+ "Ġof": 295,
341
+ "her": 296,
342
+ "ĠShe": 297,
343
+ "ad": 298,
344
+ "Ġ<": 299,
345
+ "|>": 300,
346
+ "Ġ<|": 301,
347
+ "itt": 302,
348
+ "Ġdo": 303,
349
+ "Ġe": 304,
350
+ "se": 305,
351
+ "ĠA": 306,
352
+ "Ġhappy": 307,
353
+ "Ġvery": 308,
354
+ "ent": 309,
355
+ "Ġthat": 310,
356
+ "Ġsaw": 311,
357
+ "'s": 312,
358
+ "Ġfor": 313,
359
+ "Ġsh": 314,
360
+ "Ġmom": 315,
361
+ "ittle": 316,
362
+ "ould": 317,
363
+ "Ġlittle": 318,
364
+ "endoft": 319,
365
+ "Ġshe": 320,
366
+ "Ġk": 321,
367
+ "ime": 322,
368
+ "ch": 323,
369
+ ".\"": 324,
370
+ "Ġnam": 325,
371
+ "Ġtime": 326,
372
+ "Ġsm": 327,
373
+ "Ġso": 328,
374
+ "ound": 329,
375
+ "Ġthere": 330,
376
+ "Ġnamed": 331,
377
+ "Ġbo": 332,
378
+ "Ġwere": 333,
379
+ "Ġwanted": 334,
380
+ "Ġbut": 335,
381
+ "ved": 336,
382
+ "Ġfriends": 337,
383
+ "out": 338,
384
+ "ht": 339,
385
+ "!\"": 340,
386
+ "endoftext": 341,
387
+ "Ġne": 342,
388
+ "Ġan": 343,
389
+ "ird": 344,
390
+ "al": 345,
391
+ "Ġbird": 346,
392
+ "ue": 347,
393
+ "ĠIt": 348,
394
+ "Ġtoo": 349,
395
+ "ĠĊ": 350,
396
+ "il": 351,
397
+ "Ġhel": 352,
398
+ "el": 353,
399
+ "ide": 354,
400
+ "ome": 355,
401
+ "Ġwent": 356,
402
+ "Ġwh": 357,
403
+ "ry": 358,
404
+ "ĠLily": 359,
405
+ "ĠOne": 360,
406
+ "Ġis": 361,
407
+ "Ġall": 362,
408
+ "Ġlo": 363,
409
+ "ake": 364,
410
+ "Ġloo": 365,
411
+ "Ġupon": 366,
412
+ "ter": 367,
413
+ "ĠTom": 368,
414
+ "ug": 369,
415
+ "Ġhelp": 370,
416
+ "ore": 371,
417
+ "ame": 372,
418
+ "Ġfun": 373,
419
+ "ind": 374,
420
+ "Ġtoy": 375,
421
+ "Ġas": 376,
422
+ "get": 377,
423
+ "Ġj": 378,
424
+ "Ġat": 379,
425
+ "Ġre": 380,
426
+ "ra": 381,
427
+ "gether": 382,
428
+ "Ġo": 383,
429
+ "Ġse": 384,
430
+ "ill": 385,
431
+ "ur": 386,
432
+ "Ġdid": 387,
433
+ "Ġtogether": 388,
434
+ "Ġba": 389,
435
+ "Ġcat": 390,
436
+ "Ġtre": 391,
437
+ "ood": 392,
438
+ "ted": 393,
439
+ "ic": 394,
440
+ "my": 395,
441
+ "Ġdog": 396,
442
+ "Ġcan": 397,
443
+ "ec": 398,
444
+ "Ġcould": 399,
445
+ "Ġtheir": 400,
446
+ "ard": 401,
447
+ "hed": 402,
448
+ "ax": 403,
449
+ "Ġgir": 404,
450
+ "Ġhim": 405,
451
+ "Ġro": 406,
452
+ "Ġplayed": 407,
453
+ "Ġball": 408,
454
+ "ark": 409,
455
+ "Ġkn": 410,
456
+ "?\"": 411,
457
+ "Ġgirl": 412,
458
+ "ĠOnce": 413,
459
+ "way": 414,
460
+ "Ġgo": 415,
461
+ "Ġle": 416,
462
+ "Ġare": 417,
463
+ "Ġfr": 418,
464
+ "ĠW": 419,
465
+ "Ġout": 420,
466
+ "ain": 421,
467
+ "'t": 422,
468
+ "Ġthem": 423,
469
+ "Ġsad": 424,
470
+ "ul": 425,
471
+ "um": 426,
472
+ "Ġboy": 427,
473
+ "Ġtree": 428,
474
+ "Ġcl": 429,
475
+ "Ġhave": 430,
476
+ "one": 431,
477
+ "Ġman": 432,
478
+ "hen": 433,
479
+ "Ġlooked": 434,
480
+ "Ġloved": 435,
481
+ "Ġfound": 436,
482
+ "other": 437,
483
+ "Ġsp": 438,
484
+ "oug": 439,
485
+ "Ġsc": 440,
486
+ "Ġstar": 441,
487
+ "Ġbec": 442,
488
+ "hing": 443,
489
+ "Ġback": 444,
490
+ "ĠJ": 445,
491
+ "ful": 446,
492
+ "Ġla": 447,
493
+ "own": 448,
494
+ "side": 449,
495
+ "Ġme": 450,
496
+ "are": 451,
497
+ "Ġpark": 452,
498
+ "Ġcar": 453,
499
+ "ong": 454,
500
+ "ight": 455,
501
+ "elt": 456,
502
+ "Ġwould": 457,
503
+ "op": 458,
504
+ "round": 459,
505
+ "Ġfa": 460,
506
+ "Ġlike": 461,
507
+ "Ġfelt": 462,
508
+ "Ġsee": 463,
509
+ "Ġno": 464,
510
+ "ĠF": 465,
511
+ "Ġasked": 466,
512
+ "omet": 467,
513
+ "Ġnew": 468,
514
+ "ĠSp": 469,
515
+ "Ġstarted": 470,
516
+ "ared": 471,
517
+ "Ġcame": 472,
518
+ "ice": 473,
519
+ "ouse": 474,
520
+ "Ġal": 475,
521
+ "ĠBut": 476,
522
+ "ia": 477,
523
+ "ss": 478,
524
+ "Ġmake": 479,
525
+ "Ġsomet": 480,
526
+ "ought": 481,
527
+ "Ġother": 482,
528
+ "Ġag": 483,
529
+ "Ġbr": 484,
530
+ "Ġgood": 485,
531
+ "Ġsmall": 486,
532
+ "ĠSue": 487,
533
+ "ade": 488,
534
+ "ob": 489,
535
+ "ell": 490,
536
+ "ings": 491,
537
+ "ried": 492,
538
+ "ĠMax": 493,
539
+ "ened": 494,
540
+ "Ġwal": 495,
541
+ "Ġex": 496,
542
+ "Ġwor": 497,
543
+ "Ġco": 498,
544
+ "Ġfind": 499,
545
+ "ag": 500,
546
+ "ty": 501,
547
+ "Ġaway": 502,
548
+ "Ġsomething": 503,
549
+ "Ġwhat": 504,
550
+ "Ġfrom": 505,
551
+ "Ġmade": 506,
552
+ "Ġput": 507,
553
+ "iled": 508,
554
+ "Ġhome": 509,
555
+ "Ġthought": 510,
556
+ "Ġplaying": 511,
557
+ "Ġsay": 512,
558
+ "ĠY": 513,
559
+ "ĠBen": 514,
560
+ "Ġmu": 515,
561
+ "uc": 516,
562
+ "ach": 517,
563
+ "arn": 518,
564
+ "Ġran": 519,
565
+ "Ġfl": 520,
566
+ "ile": 521,
567
+ "ĠSam": 522,
568
+ "ie": 523,
569
+ "Ġevery": 524,
570
+ "ny": 525,
571
+ "Ġagain": 526,
572
+ "ook": 527,
573
+ "dd": 528,
574
+ "ew": 529,
575
+ "Ġdown": 530,
576
+ "Ġpr": 531,
577
+ "Ġsome": 532,
578
+ "Ġtook": 533,
579
+ "Ġscared": 534,
580
+ "Ġliked": 535,
581
+ "Ġtoys": 536,
582
+ "king": 537,
583
+ "Ġlearn": 538,
584
+ "Ġhouse": 539,
585
+ "ure": 540,
586
+ "Ġwill": 541,
587
+ "Ġbox": 542,
588
+ "ep": 543,
589
+ "ret": 544,
590
+ "if": 545,
591
+ "Ġbl": 546,
592
+ "Ġmy": 547,
593
+ "Ġthings": 548,
594
+ "Ġaround": 549,
595
+ "ble": 550,
596
+ "Ġyour": 551,
597
+ "Ġsw": 552,
598
+ "ish": 553,
599
+ "Ġwhen": 554,
600
+ "ĠSo": 555,
601
+ "Ġlived": 556,
602
+ "pped": 557,
603
+ "Ġsun": 558,
604
+ "Ġthen": 559,
605
+ "ank": 560,
606
+ "oud": 561,
607
+ "Ġch": 562,
608
+ ",\"": 563,
609
+ "Ġlaug": 564,
610
+ "us": 565,
611
+ "ick": 566,
612
+ "Th": 567,
613
+ "uck": 568,
614
+ "ĠD": 569,
615
+ "Ġta": 570,
616
+ "Ġget": 571,
617
+ "ucy": 572,
618
+ "ist": 573,
619
+ "Ġsmiled": 574,
620
+ "Ġdec": 575,
621
+ "ĠLucy": 576,
622
+ "Ġfe": 577,
623
+ "Ġtried": 578,
624
+ "oth": 579,
625
+ "ily": 580,
626
+ "Ġgot": 581,
627
+ "Ġknow": 582,
628
+ "Ġwho": 583,
629
+ "ave": 584,
630
+ "qu": 585,
631
+ "Ġab": 586,
632
+ "Ġmany": 587,
633
+ "Ġany": 588,
634
+ "au": 589,
635
+ "ause": 590,
636
+ "na": 591,
637
+ "Ġint": 592,
638
+ "ited": 593,
639
+ "Ġabout": 594,
640
+ "ous": 595,
641
+ "ĠSpot": 596,
642
+ "Ġpret": 597,
643
+ "Ġlot": 598,
644
+ "Ġv": 599,
645
+ "ers": 600,
646
+ "Ġred": 601,
647
+ "ĠE": 602,
648
+ "ise": 603,
649
+ "ust": 604,
650
+ "Ġmore": 605,
651
+ "ided": 606,
652
+ "Ġpic": 607,
653
+ "nder": 608,
654
+ "Ġcare": 609,
655
+ "ace": 610,
656
+ "Ġpo": 611,
657
+ "as": 612,
658
+ "urp": 613,
659
+ "Ġlearned": 614,
660
+ "Ġun": 615,
661
+ "Ġwater": 616,
662
+ "Ġhug": 617,
663
+ "Ġexc": 618,
664
+ "Ġbest": 619,
665
+ "ump": 620,
666
+ "Ġop": 621,
667
+ "ap": 622,
668
+ "Ġgre": 623,
669
+ "ways": 624,
670
+ "âĢ": 625,
671
+ "Ġoutside": 626,
672
+ "ĠMia": 627,
673
+ "fe": 628,
674
+ "Ġalways": 629,
675
+ "ant": 630,
676
+ "Ġroom": 631,
677
+ "Ġho": 632,
678
+ "Ġinto": 633,
679
+ "ive": 634,
680
+ "Ġeat": 635,
681
+ "ĠMom": 636,
682
+ "ĠAn": 637,
683
+ "Ġpe": 638,
684
+ "ite": 639,
685
+ "Ġboth": 640,
686
+ "ĠFr": 641,
687
+ "ĠYou": 642,
688
+ "Ġdad": 643,
689
+ "Ġke": 644,
690
+ "ally": 645,
691
+ "Ġlook": 646,
692
+ "Ġone": 647,
693
+ "Ġshow": 648,
694
+ "Ġthis": 649,
695
+ "Ġexcited": 650,
696
+ "Ġfast": 651,
697
+ "Ġnice": 652,
698
+ "Ġsurp": 653,
699
+ "Ġrun": 654,
700
+ "Ġlong": 655,
701
+ "ĠC": 656,
702
+ "Ġam": 657,
703
+ "fter": 658,
704
+ "als": 659,
705
+ "Ġsk": 660,
706
+ "Ġjump": 661,
707
+ "Ġtold": 662,
708
+ "Ġfeel": 663,
709
+ "Ġmo": 664,
710
+ "Ġinside": 665,
711
+ "Ġtr": 666,
712
+ "our": 667,
713
+ "ull": 668,
714
+ "Ġsl": 669,
715
+ "Ġpretty": 670,
716
+ "iny": 671,
717
+ "ĠBob": 672,
718
+ "Ġgra": 673,
719
+ "udd": 674,
720
+ "ged": 675,
721
+ "Ġeach": 676,
722
+ "Ġgave": 677,
723
+ "Ġmuch": 678,
724
+ "bb": 679,
725
+ "Ġsays": 680,
726
+ "Ġstr": 681,
727
+ "Ġhow": 682,
728
+ "ink": 683,
729
+ "og": 684,
730
+ "Ġtow": 685,
731
+ "Ġor": 686,
732
+ "ĠAs": 687,
733
+ "lew": 688,
734
+ "Ġneed": 689,
735
+ "etter": 690,
736
+ "Ġunder": 691,
737
+ "ised": 692,
738
+ "ven": 693,
739
+ "Ġold": 694,
740
+ "Ġrock": 695,
741
+ "Ġtake": 696,
742
+ "ess": 697,
743
+ "Ġknew": 698,
744
+ "ro": 699,
745
+ "ĠTh": 700,
746
+ "Ġca": 701,
747
+ "Ġcle": 702,
748
+ "Ġfish": 703,
749
+ "here": 704,
750
+ "Ġlist": 705,
751
+ "Ġclo": 706,
752
+ "urt": 707,
753
+ "ĠAnd": 708,
754
+ "Ġbear": 709,
755
+ "ched": 710,
756
+ "Ġhand": 711,
757
+ "ara": 712,
758
+ "Ġtry": 713,
759
+ "Ġbecame": 714,
760
+ "Ġthan": 715,
761
+ "ĠWe": 716,
762
+ "Ġkind": 717,
763
+ "est": 718,
764
+ "urn": 719,
765
+ "Ġlaughed": 720,
766
+ "Ġjust": 721,
767
+ "orry": 722,
768
+ "Ġfood": 723,
769
+ "more": 724,
770
+ "ge": 725,
771
+ "Ġhig": 726,
772
+ "Ġfi": 727,
773
+ "Ġwat": 728,
774
+ "ĠIn": 729,
775
+ "ine": 730,
776
+ "Ġsurpr": 731,
777
+ "ak": 732,
778
+ "Ġdecided": 733,
779
+ "Ġsky": 734,
780
+ "Ġbecause": 735,
781
+ "Ġide": 736,
782
+ "Ġus": 737,
783
+ "ving": 738,
784
+ "Ġnear": 739,
785
+ "Ġtw": 740,
786
+ "Ġidea": 741,
787
+ "Ġbetter": 742,
788
+ "Ġbug": 743,
789
+ "Ġheard": 744,
790
+ "gry": 745,
791
+ "Ġits": 746,
792
+ "ase": 747,
793
+ "ĠThen": 748,
794
+ "Ġen": 749,
795
+ "ack": 750,
796
+ "ate": 751,
797
+ "Wh": 752,
798
+ "lf": 753,
799
+ "Ġlet": 754,
800
+ "ort": 755,
801
+ "ĠK": 756,
802
+ "by": 757,
803
+ "Ġanim": 758,
804
+ "ĠWhen": 759,
805
+ "ff": 760,
806
+ "Ġshare": 761,
807
+ "Ġcareful": 762,
808
+ "Ġcol": 763,
809
+ "Ġif": 764,
810
+ "Ġte": 765,
811
+ "eci": 766,
812
+ "Ġgr": 767,
813
+ "Ġfly": 768,
814
+ "Ġfo": 769,
815
+ "Ġstor": 770,
816
+ "Ġflew": 771,
817
+ "ves": 772,
818
+ "Ġdan": 773,
819
+ "Yes": 774,
820
+ "Ġcom": 775,
821
+ "ecial": 776,
822
+ "Ġspecial": 777,
823
+ "ion": 778,
824
+ "Ġflow": 779,
825
+ "Ġnever": 780,
826
+ "Ġby": 781,
827
+ "ected": 782,
828
+ "Ġbu": 783,
829
+ "Ġdon": 784,
830
+ "Ġra": 785,
831
+ "Ġwind": 786,
832
+ "Ġtal": 787,
833
+ "Ġclean": 788,
834
+ "rm": 789,
835
+ "Ġqu": 790,
836
+ "Ġdre": 791,
837
+ "ĠHis": 792,
838
+ "Ġend": 793,
839
+ "Ġeven": 794,
840
+ "ople": 795,
841
+ "Ġsafe": 796,
842
+ "Ġmag": 797,
843
+ "Ġunex": 798,
844
+ "Ġunexp": 799,
845
+ "Ġshiny": 800,
846
+ "Ġhard": 801,
847
+ "Ġover": 802,
848
+ "Ġbook": 803,
849
+ "ĠWh": 804,
850
+ "Ġturn": 805,
851
+ "ĠSara": 806,
852
+ "Ġfam": 807,
853
+ "Ġfore": 808,
854
+ "Ġbad": 809,
855
+ "Ġcake": 810,
856
+ "Ġafter": 811,
857
+ "ady": 812,
858
+ "Ġpeople": 813,
859
+ "Ġcu": 814,
860
+ "pl": 815,
861
+ "ĠP": 816,
862
+ "Ġproud": 817,
863
+ "ber": 818,
864
+ "Ġhigh": 819,
865
+ "Ġgl": 820,
866
+ "Ġche": 821,
867
+ "ĠAnna": 822,
868
+ "ĠAmy": 823,
869
+ "Ġcome": 824,
870
+ "Ġgard": 825,
871
+ "Ġdoor": 826,
872
+ "Ġopened": 827,
873
+ "Ġground": 828,
874
+ "Ġgarden": 829,
875
+ "Ġpicked": 830,
876
+ "Ġim": 831,
877
+ "Ġhurt": 832,
878
+ "Ġloud": 833,
879
+ "ĠN": 834,
880
+ "Ġsto": 835,
881
+ "ild": 836,
882
+ "Ġsorry": 837,
883
+ "Ġway": 838,
884
+ "ip": 839,
885
+ "'m": 840,
886
+ "Ġblue": 841,
887
+ "Ġstill": 842,
888
+ "Ġever": 843,
889
+ "Ġhugged": 844,
890
+ "Ġfar": 845,
891
+ "Ġcall": 846,
892
+ "Ġpa": 847,
893
+ "Ġlove": 848,
894
+ "age": 849,
895
+ "hes": 850,
896
+ "Ġoff": 851,
897
+ "Ġmagic": 852,
898
+ "ough": 853,
899
+ "Ġwalked": 854,
900
+ "Ġsn": 855,
901
+ "udden": 856,
902
+ "Ġpar": 857,
903
+ "Ġfamily": 858,
904
+ "Ġshould": 859,
905
+ "ool": 860,
906
+ "be": 861,
907
+ "Ġclim": 862,
908
+ "iz": 863,
909
+ "Ġkid": 864,
910
+ "ies": 865,
911
+ "dy": 866,
912
+ "Ġma": 867,
913
+ "Ġnow": 868,
914
+ "Ġhappened": 869,
915
+ "Ġgreat": 870,
916
+ "ĠHer": 871,
917
+ "Ġdidn": 872,
918
+ "em": 873,
919
+ "Ġplace": 874,
920
+ "Ġanimals": 875,
921
+ "Ġstrong": 876,
922
+ "uff": 877,
923
+ "Ġunt": 878,
924
+ "uddenly": 879,
925
+ "ĠJo": 880,
926
+ "Ġwalk": 881,
927
+ "ct": 882,
928
+ "Ġforest": 883,
929
+ "Ġbra": 884,
930
+ "No": 885,
931
+ "Ġclimb": 886,
932
+ "aut": 887,
933
+ "ĠFrom": 888,
934
+ "Ġgive": 889,
935
+ "Ġbro": 890,
936
+ "Ġwo": 891,
937
+ "Ġsqu": 892,
938
+ "Ġuntil": 893,
939
+ "Ġbeaut": 894,
940
+ "Ġapp": 895,
941
+ "ĠEvery": 896,
942
+ "Ġnext": 897,
943
+ "Ġfrog": 898,
944
+ "Ġboat": 899,
945
+ "ning": 900,
946
+ "Ġstick": 901,
947
+ "Ġjo": 902,
948
+ "Ġtra": 903,
949
+ "Ġimp": 904,
950
+ "Ġpict": 905,
951
+ "Ġkids": 906,
952
+ "Ġstay": 907,
953
+ "Ġbeauti": 908,
954
+ "Ġbeautiful": 909,
955
+ "Ġclos": 910,
956
+ "Thank": 911,
957
+ "ĠR": 912,
958
+ "Ġdra": 913,
959
+ "Ġlea": 914,
960
+ "Ġlisten": 915,
961
+ "Ġstory": 916,
962
+ "Ġanymore": 917,
963
+ "ummy": 918,
964
+ "Ġwhile": 919,
965
+ "pt": 920,
966
+ "rel": 921,
967
+ "Ġrem": 922,
968
+ "Ġtown": 923,
969
+ "ane": 924,
970
+ "Ġcre": 925,
971
+ "Ġlots": 926,
972
+ "ĠAt": 927,
973
+ "kay": 928,
974
+ "Ġad": 929,
975
+ "cks": 930,
976
+ "ary": 931,
977
+ "Ġbeing": 932,
978
+ "Ġrain": 933,
979
+ "Ġmor": 934,
980
+ "unny": 935,
981
+ "ors": 936,
982
+ "Ġcry": 937,
983
+ "Ġcalled": 938,
984
+ "th": 939,
985
+ "and": 940,
986
+ "Ġmet": 941,
987
+ "les": 942,
988
+ "Ġgame": 943,
989
+ "Ġangry": 944,
990
+ "ger": 945,
991
+ "Ġhat": 946,
992
+ "lease": 947,
993
+ "Ġthre": 948,
994
+ "self": 949,
995
+ "Ġsoft": 950,
996
+ "ma": 951,
997
+ "uffy": 952,
998
+ "Ġkeep": 953,
999
+ "Ġbed": 954,
1000
+ "Ġlost": 955,
1001
+ "Ġtruck": 956,
1002
+ "Ġopen": 957,
1003
+ "Ġwarm": 958,
1004
+ "Ġmouse": 959,
1005
+ "oon": 960,
1006
+ "Ġate": 961,
1007
+ "It": 962,
1008
+ "fore": 963,
1009
+ "Ġbrave": 964,
1010
+ "so": 965,
1011
+ "Ġwatch": 966,
1012
+ "ĠG": 967,
1013
+ "Ġsurprised": 968,
1014
+ "Let": 969,
1015
+ "ect": 970,
1016
+ "Ġalso": 971,
1017
+ "Ġfell": 972,
1018
+ "Ġgreen": 973,
1019
+ "Ġface": 974,
1020
+ "Ġtwo": 975,
1021
+ "ĠFl": 976,
1022
+ "Ġunexpected": 977,
1023
+ "ĠKitt": 978,
1024
+ "You": 979,
1025
+ "Ġpicture": 980,
1026
+ "ĠâĢ": 981,
1027
+ "vent": 982,
1028
+ "Ġnoise": 983,
1029
+ "bbit": 984,
1030
+ "gan": 985,
1031
+ "irst": 986,
1032
+ "Ġwhere": 987,
1033
+ "led": 988,
1034
+ "Ġthanked": 989,
1035
+ "Ġsoon": 990,
1036
+ "kes": 991,
1037
+ "Ġslide": 992,
1038
+ "Ġsmile": 993,
1039
+ "Ġhole": 994,
1040
+ "Ġpus": 995,
1041
+ "day": 996,
1042
+ "Ġbefore": 997,
1043
+ "Ġrabbit": 998,
1044
+ "ired": 999,
1045
+ "Ġsat": 1000,
1046
+ "Ġdoll": 1001,
1047
+ "ĠFluffy": 1002,
1048
+ "irrel": 1003,
1049
+ "Ġkept": 1004,
1050
+ "ĠâĢľ": 1005,
1051
+ "Hi": 1006,
1052
+ "âĢĿ": 1007,
1053
+ "iss": 1008,
1054
+ "owl": 1009,
1055
+ "Ġfin": 1010,
1056
+ "ear": 1011,
1057
+ "Ġsquirrel": 1012,
1058
+ "Ġjumped": 1013,
1059
+ "Ġblo": 1014,
1060
+ "Ġuse": 1015,
1061
+ "able": 1016,
1062
+ "Ġstore": 1017,
1063
+ "Ġhands": 1018,
1064
+ "Ġbirds": 1019,
1065
+ "Ġbre": 1020,
1066
+ "Ġpain": 1021,
1067
+ "Ġgoing": 1022,
1068
+ "Ġfunny": 1023
1069
+ },
1070
+ "merges": [
1071
+ "Ġ t",
1072
+ "Ġ a",
1073
+ "h e",
1074
+ "Ġ s",
1075
+ "n d",
1076
+ "Ġ w",
1077
+ "Ġt he",
1078
+ "e d",
1079
+ "Ġ b",
1080
+ "Ġ T",
1081
+ "Ġt o",
1082
+ "Ġa nd",
1083
+ "Ġ h",
1084
+ "Ġ f",
1085
+ "i n",
1086
+ "Ġw a",
1087
+ "r e",
1088
+ "i t",
1089
+ "o u",
1090
+ "Ġ l",
1091
+ "Ġ d",
1092
+ "Ġ c",
1093
+ "Ġ p",
1094
+ "a y",
1095
+ "Ġ m",
1096
+ "e r",
1097
+ "Ġwa s",
1098
+ "o m",
1099
+ "Ġ he",
1100
+ "i s",
1101
+ "ĠT he",
1102
+ "i m",
1103
+ "Ġ n",
1104
+ "o n",
1105
+ "Ġ S",
1106
+ "a r",
1107
+ "Ġs a",
1108
+ "l l",
1109
+ "i d",
1110
+ "Ġh a",
1111
+ "a t",
1112
+ "Ġ g",
1113
+ "in g",
1114
+ "e n",
1115
+ "o t",
1116
+ "a n",
1117
+ "l e",
1118
+ "e nd",
1119
+ "o r",
1120
+ "Ġ \"",
1121
+ "o f",
1122
+ "Ġ H",
1123
+ "i r",
1124
+ "a m",
1125
+ "e t",
1126
+ "Ġ it",
1127
+ "Ġt h",
1128
+ "i g",
1129
+ "ĠH e",
1130
+ "Ġ in",
1131
+ "Ġp l",
1132
+ "Ġ O",
1133
+ "l y",
1134
+ "r i",
1135
+ "v er",
1136
+ "u t",
1137
+ "o w",
1138
+ "Ġb e",
1139
+ "Ġ u",
1140
+ "Ġsa id",
1141
+ "Ġpl ay",
1142
+ "it h",
1143
+ "Ġw ith",
1144
+ "Ġd ay",
1145
+ "Ġ y",
1146
+ "k ed",
1147
+ "p p",
1148
+ "e x",
1149
+ "Ġ r",
1150
+ "c e",
1151
+ "l d",
1152
+ "o o",
1153
+ "Ġ I",
1154
+ "ĠThe y",
1155
+ "Ġhe r",
1156
+ "Ġ L",
1157
+ "Ġh is",
1158
+ "Ġs t",
1159
+ "e s",
1160
+ "ĠO n",
1161
+ "Ġ B",
1162
+ "c k",
1163
+ "Ġb ig",
1164
+ "n t",
1165
+ "Ġy ou",
1166
+ "k e",
1167
+ "ex t",
1168
+ "ver y",
1169
+ "of t",
1170
+ "Ġ M",
1171
+ "Ġ on",
1172
+ "u n",
1173
+ "Ġha pp",
1174
+ "s t",
1175
+ "v e",
1176
+ "Ġf ri",
1177
+ "Ġfri end",
1178
+ "ĠT im",
1179
+ "Ġl i",
1180
+ "ĠL i",
1181
+ "Ġthe y",
1182
+ "a ll",
1183
+ "Ġw e",
1184
+ "Ġha d",
1185
+ "Ġn ot",
1186
+ "Ġu p",
1187
+ "Ġwa nt",
1188
+ "Ġ of",
1189
+ "he r",
1190
+ "ĠS he",
1191
+ "a d",
1192
+ "Ġ <",
1193
+ "| >",
1194
+ "Ġ< |",
1195
+ "it t",
1196
+ "Ġd o",
1197
+ "Ġ e",
1198
+ "s e",
1199
+ "Ġ A",
1200
+ "Ġhapp y",
1201
+ "Ġ very",
1202
+ "en t",
1203
+ "Ġth at",
1204
+ "Ġsa w",
1205
+ "' s",
1206
+ "Ġf or",
1207
+ "Ġs h",
1208
+ "Ġm om",
1209
+ "itt le",
1210
+ "ou ld",
1211
+ "Ġl ittle",
1212
+ "end oft",
1213
+ "Ġs he",
1214
+ "Ġ k",
1215
+ "im e",
1216
+ "c h",
1217
+ ". \"",
1218
+ "Ġn am",
1219
+ "Ġt ime",
1220
+ "Ġs m",
1221
+ "Ġs o",
1222
+ "ou nd",
1223
+ "Ġthe re",
1224
+ "Ġnam ed",
1225
+ "Ġb o",
1226
+ "Ġwe re",
1227
+ "Ġwant ed",
1228
+ "Ġb ut",
1229
+ "v ed",
1230
+ "Ġfriend s",
1231
+ "ou t",
1232
+ "h t",
1233
+ "! \"",
1234
+ "endoft ext",
1235
+ "Ġn e",
1236
+ "Ġa n",
1237
+ "ir d",
1238
+ "a l",
1239
+ "Ġb ird",
1240
+ "u e",
1241
+ "ĠI t",
1242
+ "Ġto o",
1243
+ "Ġ Ċ",
1244
+ "i l",
1245
+ "Ġhe l",
1246
+ "e l",
1247
+ "id e",
1248
+ "om e",
1249
+ "Ġw ent",
1250
+ "Ġw h",
1251
+ "r y",
1252
+ "ĠLi ly",
1253
+ "ĠOn e",
1254
+ "Ġ is",
1255
+ "Ġa ll",
1256
+ "Ġl o",
1257
+ "a ke",
1258
+ "Ġl oo",
1259
+ "Ġup on",
1260
+ "t er",
1261
+ "ĠT om",
1262
+ "u g",
1263
+ "Ġhel p",
1264
+ "o re",
1265
+ "am e",
1266
+ "Ġf un",
1267
+ "i nd",
1268
+ "Ġto y",
1269
+ "Ġa s",
1270
+ "g et",
1271
+ "Ġ j",
1272
+ "Ġa t",
1273
+ "Ġ re",
1274
+ "r a",
1275
+ "get her",
1276
+ "Ġ o",
1277
+ "Ġs e",
1278
+ "i ll",
1279
+ "u r",
1280
+ "Ġd id",
1281
+ "Ġto gether",
1282
+ "Ġb a",
1283
+ "Ġc at",
1284
+ "Ġt re",
1285
+ "oo d",
1286
+ "t ed",
1287
+ "i c",
1288
+ "m y",
1289
+ "Ġdo g",
1290
+ "Ġc an",
1291
+ "e c",
1292
+ "Ġc ould",
1293
+ "Ġthe ir",
1294
+ "ar d",
1295
+ "he d",
1296
+ "a x",
1297
+ "Ġg ir",
1298
+ "Ġh im",
1299
+ "Ġr o",
1300
+ "Ġplay ed",
1301
+ "Ġb all",
1302
+ "ar k",
1303
+ "Ġk n",
1304
+ "? \"",
1305
+ "Ġgir l",
1306
+ "ĠOn ce",
1307
+ "w ay",
1308
+ "Ġg o",
1309
+ "Ġl e",
1310
+ "Ġa re",
1311
+ "Ġf r",
1312
+ "Ġ W",
1313
+ "Ġ out",
1314
+ "a in",
1315
+ "' t",
1316
+ "Ġthe m",
1317
+ "Ġsa d",
1318
+ "u l",
1319
+ "u m",
1320
+ "Ġbo y",
1321
+ "Ġtre e",
1322
+ "Ġc l",
1323
+ "Ġha ve",
1324
+ "on e",
1325
+ "Ġm an",
1326
+ "he n",
1327
+ "Ġloo ked",
1328
+ "Ġlo ved",
1329
+ "Ġf ound",
1330
+ "ot her",
1331
+ "Ġs p",
1332
+ "ou g",
1333
+ "Ġs c",
1334
+ "Ġst ar",
1335
+ "Ġbe c",
1336
+ "h ing",
1337
+ "Ġba ck",
1338
+ "Ġ J",
1339
+ "f ul",
1340
+ "Ġl a",
1341
+ "ow n",
1342
+ "s ide",
1343
+ "Ġm e",
1344
+ "a re",
1345
+ "Ġp ark",
1346
+ "Ġc ar",
1347
+ "on g",
1348
+ "ig ht",
1349
+ "el t",
1350
+ "Ġw ould",
1351
+ "o p",
1352
+ "r ound",
1353
+ "Ġf a",
1354
+ "Ġli ke",
1355
+ "Ġf elt",
1356
+ "Ġse e",
1357
+ "Ġn o",
1358
+ "Ġ F",
1359
+ "Ġas ked",
1360
+ "om et",
1361
+ "Ġne w",
1362
+ "ĠS p",
1363
+ "Ġstar ted",
1364
+ "ar ed",
1365
+ "Ġc ame",
1366
+ "i ce",
1367
+ "ou se",
1368
+ "Ġa l",
1369
+ "ĠB ut",
1370
+ "i a",
1371
+ "s s",
1372
+ "Ġm ake",
1373
+ "Ġs omet",
1374
+ "oug ht",
1375
+ "Ġ other",
1376
+ "Ġa g",
1377
+ "Ġb r",
1378
+ "Ġg ood",
1379
+ "Ġsm all",
1380
+ "ĠS ue",
1381
+ "ad e",
1382
+ "o b",
1383
+ "e ll",
1384
+ "ing s",
1385
+ "ri ed",
1386
+ "ĠM ax",
1387
+ "en ed",
1388
+ "Ġwa l",
1389
+ "Ġ ex",
1390
+ "Ġw or",
1391
+ "Ġc o",
1392
+ "Ġf ind",
1393
+ "a g",
1394
+ "t y",
1395
+ "Ġa way",
1396
+ "Ġsomet hing",
1397
+ "Ġwh at",
1398
+ "Ġfr om",
1399
+ "Ġm ade",
1400
+ "Ġp ut",
1401
+ "il ed",
1402
+ "Ġh ome",
1403
+ "Ġth ought",
1404
+ "Ġplay ing",
1405
+ "Ġs ay",
1406
+ "Ġ Y",
1407
+ "ĠB en",
1408
+ "Ġm u",
1409
+ "u c",
1410
+ "a ch",
1411
+ "ar n",
1412
+ "Ġr an",
1413
+ "Ġf l",
1414
+ "i le",
1415
+ "ĠS am",
1416
+ "i e",
1417
+ "Ġe very",
1418
+ "n y",
1419
+ "Ġag ain",
1420
+ "oo k",
1421
+ "d d",
1422
+ "e w",
1423
+ "Ġd own",
1424
+ "Ġp r",
1425
+ "Ġs ome",
1426
+ "Ġtoo k",
1427
+ "Ġsc ared",
1428
+ "Ġli ked",
1429
+ "Ġtoy s",
1430
+ "k ing",
1431
+ "Ġle arn",
1432
+ "Ġh ouse",
1433
+ "u re",
1434
+ "Ġw ill",
1435
+ "Ġbo x",
1436
+ "e p",
1437
+ "re t",
1438
+ "i f",
1439
+ "Ġb l",
1440
+ "Ġm y",
1441
+ "Ġth ings",
1442
+ "Ġa round",
1443
+ "b le",
1444
+ "Ġyou r",
1445
+ "Ġs w",
1446
+ "is h",
1447
+ "Ġw hen",
1448
+ "ĠS o",
1449
+ "Ġli ved",
1450
+ "pp ed",
1451
+ "Ġs un",
1452
+ "Ġthe n",
1453
+ "an k",
1454
+ "ou d",
1455
+ "Ġc h",
1456
+ ", \"",
1457
+ "Ġla ug",
1458
+ "u s",
1459
+ "i ck",
1460
+ "T h",
1461
+ "u ck",
1462
+ "Ġ D",
1463
+ "Ġt a",
1464
+ "Ġg et",
1465
+ "uc y",
1466
+ "is t",
1467
+ "Ġsm iled",
1468
+ "Ġd ec",
1469
+ "ĠL ucy",
1470
+ "Ġf e",
1471
+ "Ġt ried",
1472
+ "ot h",
1473
+ "i ly",
1474
+ "Ġg ot",
1475
+ "Ġkn ow",
1476
+ "Ġwh o",
1477
+ "a ve",
1478
+ "q u",
1479
+ "Ġa b",
1480
+ "Ġman y",
1481
+ "Ġan y",
1482
+ "a u",
1483
+ "au se",
1484
+ "n a",
1485
+ "Ġin t",
1486
+ "it ed",
1487
+ "Ġab out",
1488
+ "ou s",
1489
+ "ĠSp ot",
1490
+ "Ġp ret",
1491
+ "Ġl ot",
1492
+ "Ġ v",
1493
+ "er s",
1494
+ "Ġr ed",
1495
+ "Ġ E",
1496
+ "is e",
1497
+ "u st",
1498
+ "Ġm ore",
1499
+ "id ed",
1500
+ "Ġp ic",
1501
+ "nd er",
1502
+ "Ġc are",
1503
+ "a ce",
1504
+ "Ġp o",
1505
+ "a s",
1506
+ "ur p",
1507
+ "Ġlearn ed",
1508
+ "Ġu n",
1509
+ "Ġwa ter",
1510
+ "Ġh ug",
1511
+ "Ġex c",
1512
+ "Ġbe st",
1513
+ "um p",
1514
+ "Ġo p",
1515
+ "a p",
1516
+ "Ġg re",
1517
+ "way s",
1518
+ "â Ģ",
1519
+ "Ġout side",
1520
+ "ĠM ia",
1521
+ "f e",
1522
+ "Ġal ways",
1523
+ "an t",
1524
+ "Ġro om",
1525
+ "Ġh o",
1526
+ "Ġint o",
1527
+ "i ve",
1528
+ "Ġe at",
1529
+ "ĠM om",
1530
+ "ĠA n",
1531
+ "Ġp e",
1532
+ "it e",
1533
+ "Ġb oth",
1534
+ "ĠF r",
1535
+ "ĠY ou",
1536
+ "Ġd ad",
1537
+ "Ġ ke",
1538
+ "all y",
1539
+ "Ġloo k",
1540
+ "Ġon e",
1541
+ "Ġsh ow",
1542
+ "Ġth is",
1543
+ "Ġexc ited",
1544
+ "Ġfa st",
1545
+ "Ġn ice",
1546
+ "Ġs urp",
1547
+ "Ġr un",
1548
+ "Ġl ong",
1549
+ "Ġ C",
1550
+ "Ġa m",
1551
+ "f ter",
1552
+ "al s",
1553
+ "Ġs k",
1554
+ "Ġj ump",
1555
+ "Ġto ld",
1556
+ "Ġfe el",
1557
+ "Ġm o",
1558
+ "Ġin side",
1559
+ "Ġt r",
1560
+ "ou r",
1561
+ "u ll",
1562
+ "Ġs l",
1563
+ "Ġpret ty",
1564
+ "in y",
1565
+ "ĠB ob",
1566
+ "Ġg ra",
1567
+ "u dd",
1568
+ "g ed",
1569
+ "Ġe ach",
1570
+ "Ġg ave",
1571
+ "Ġmu ch",
1572
+ "b b",
1573
+ "Ġsay s",
1574
+ "Ġst r",
1575
+ "Ġh ow",
1576
+ "in k",
1577
+ "o g",
1578
+ "Ġto w",
1579
+ "Ġ or",
1580
+ "ĠA s",
1581
+ "le w",
1582
+ "Ġne ed",
1583
+ "et ter",
1584
+ "Ġu nder",
1585
+ "is ed",
1586
+ "v en",
1587
+ "Ġo ld",
1588
+ "Ġro ck",
1589
+ "Ġt ake",
1590
+ "es s",
1591
+ "Ġkn ew",
1592
+ "r o",
1593
+ "ĠT h",
1594
+ "Ġc a",
1595
+ "Ġc le",
1596
+ "Ġf ish",
1597
+ "he re",
1598
+ "Ġl ist",
1599
+ "Ġcl o",
1600
+ "ur t",
1601
+ "ĠA nd",
1602
+ "Ġbe ar",
1603
+ "c hed",
1604
+ "Ġha nd",
1605
+ "ar a",
1606
+ "Ġt ry",
1607
+ "Ġbec ame",
1608
+ "Ġth an",
1609
+ "ĠW e",
1610
+ "Ġk ind",
1611
+ "es t",
1612
+ "ur n",
1613
+ "Ġlaug hed",
1614
+ "Ġj ust",
1615
+ "or ry",
1616
+ "Ġf ood",
1617
+ "m ore",
1618
+ "g e",
1619
+ "Ġh ig",
1620
+ "Ġf i",
1621
+ "Ġwa t",
1622
+ "ĠI n",
1623
+ "in e",
1624
+ "Ġsurp r",
1625
+ "a k",
1626
+ "Ġdec ided",
1627
+ "Ġsk y",
1628
+ "Ġbec ause",
1629
+ "Ġ ide",
1630
+ "Ġu s",
1631
+ "v ing",
1632
+ "Ġne ar",
1633
+ "Ġt w",
1634
+ "Ġide a",
1635
+ "Ġb etter",
1636
+ "Ġb ug",
1637
+ "Ġhe ard",
1638
+ "g ry",
1639
+ "Ġit s",
1640
+ "a se",
1641
+ "ĠThe n",
1642
+ "Ġ en",
1643
+ "a ck",
1644
+ "at e",
1645
+ "W h",
1646
+ "l f",
1647
+ "Ġl et",
1648
+ "or t",
1649
+ "Ġ K",
1650
+ "b y",
1651
+ "Ġan im",
1652
+ "ĠW hen",
1653
+ "f f",
1654
+ "Ġsh are",
1655
+ "Ġcare ful",
1656
+ "Ġco l",
1657
+ "Ġ if",
1658
+ "Ġt e",
1659
+ "ec i",
1660
+ "Ġg r",
1661
+ "Ġf ly",
1662
+ "Ġf o",
1663
+ "Ġst or",
1664
+ "Ġf lew",
1665
+ "v es",
1666
+ "Ġd an",
1667
+ "Y es",
1668
+ "Ġc om",
1669
+ "eci al",
1670
+ "Ġsp ecial",
1671
+ "i on",
1672
+ "Ġfl ow",
1673
+ "Ġne ver",
1674
+ "Ġb y",
1675
+ "ec ted",
1676
+ "Ġb u",
1677
+ "Ġd on",
1678
+ "Ġr a",
1679
+ "Ġw ind",
1680
+ "Ġt al",
1681
+ "Ġcle an",
1682
+ "r m",
1683
+ "Ġ qu",
1684
+ "Ġd re",
1685
+ "ĠH is",
1686
+ "Ġ end",
1687
+ "Ġe ven",
1688
+ "op le",
1689
+ "Ġsa fe",
1690
+ "Ġm ag",
1691
+ "Ġun ex",
1692
+ "Ġunex p",
1693
+ "Ġsh iny",
1694
+ "Ġh ard",
1695
+ "Ġo ver",
1696
+ "Ġb ook",
1697
+ "ĠW h",
1698
+ "Ġt urn",
1699
+ "ĠS ara",
1700
+ "Ġf am",
1701
+ "Ġf ore",
1702
+ "Ġb ad",
1703
+ "Ġc ake",
1704
+ "Ġa fter",
1705
+ "ad y",
1706
+ "Ġpe ople",
1707
+ "Ġc u",
1708
+ "p l",
1709
+ "Ġ P",
1710
+ "Ġpr oud",
1711
+ "b er",
1712
+ "Ġhig h",
1713
+ "Ġg l",
1714
+ "Ġc he",
1715
+ "ĠAn na",
1716
+ "ĠA my",
1717
+ "Ġc ome",
1718
+ "Ġg ard",
1719
+ "Ġdo or",
1720
+ "Ġop ened",
1721
+ "Ġg round",
1722
+ "Ġgard en",
1723
+ "Ġpic ked",
1724
+ "Ġ im",
1725
+ "Ġh urt",
1726
+ "Ġl oud",
1727
+ "Ġ N",
1728
+ "Ġst o",
1729
+ "i ld",
1730
+ "Ġs orry",
1731
+ "Ġwa y",
1732
+ "i p",
1733
+ "' m",
1734
+ "Ġbl ue",
1735
+ "Ġst ill",
1736
+ "Ġe ver",
1737
+ "Ġhug ged",
1738
+ "Ġf ar",
1739
+ "Ġc all",
1740
+ "Ġp a",
1741
+ "Ġlo ve",
1742
+ "ag e",
1743
+ "he s",
1744
+ "Ġof f",
1745
+ "Ġmag ic",
1746
+ "oug h",
1747
+ "Ġwal ked",
1748
+ "Ġs n",
1749
+ "udd en",
1750
+ "Ġp ar",
1751
+ "Ġfam ily",
1752
+ "Ġsh ould",
1753
+ "oo l",
1754
+ "b e",
1755
+ "Ġcl im",
1756
+ "i z",
1757
+ "Ġk id",
1758
+ "i es",
1759
+ "d y",
1760
+ "Ġm a",
1761
+ "Ġn ow",
1762
+ "Ġhapp ened",
1763
+ "Ġgre at",
1764
+ "ĠH er",
1765
+ "Ġdid n",
1766
+ "e m",
1767
+ "Ġpl ace",
1768
+ "Ġanim als",
1769
+ "Ġstr ong",
1770
+ "u ff",
1771
+ "Ġu nt",
1772
+ "udden ly",
1773
+ "ĠJ o",
1774
+ "Ġwal k",
1775
+ "c t",
1776
+ "Ġfore st",
1777
+ "Ġb ra",
1778
+ "N o",
1779
+ "Ġclim b",
1780
+ "a ut",
1781
+ "ĠFr om",
1782
+ "Ġg ive",
1783
+ "Ġbr o",
1784
+ "Ġw o",
1785
+ "Ġs qu",
1786
+ "Ġunt il",
1787
+ "Ġbe aut",
1788
+ "Ġa pp",
1789
+ "ĠE very",
1790
+ "Ġn ext",
1791
+ "Ġfr og",
1792
+ "Ġbo at",
1793
+ "n ing",
1794
+ "Ġst ick",
1795
+ "Ġj o",
1796
+ "Ġt ra",
1797
+ "Ġim p",
1798
+ "Ġpic t",
1799
+ "Ġkid s",
1800
+ "Ġst ay",
1801
+ "Ġbeaut i",
1802
+ "Ġbeauti ful",
1803
+ "Ġclo s",
1804
+ "Th ank",
1805
+ "Ġ R",
1806
+ "Ġd ra",
1807
+ "Ġle a",
1808
+ "Ġlist en",
1809
+ "Ġstor y",
1810
+ "Ġany more",
1811
+ "um my",
1812
+ "Ġwh ile",
1813
+ "p t",
1814
+ "re l",
1815
+ "Ġre m",
1816
+ "Ġtow n",
1817
+ "an e",
1818
+ "Ġc re",
1819
+ "Ġlot s",
1820
+ "ĠA t",
1821
+ "k ay",
1822
+ "Ġa d",
1823
+ "ck s",
1824
+ "ar y",
1825
+ "Ġbe ing",
1826
+ "Ġr ain",
1827
+ "Ġm or",
1828
+ "un ny",
1829
+ "or s",
1830
+ "Ġc ry",
1831
+ "Ġcall ed",
1832
+ "t h",
1833
+ "a nd",
1834
+ "Ġm et",
1835
+ "le s",
1836
+ "Ġg ame",
1837
+ "Ġan gry",
1838
+ "g er",
1839
+ "Ġha t",
1840
+ "le ase",
1841
+ "Ġth re",
1842
+ "se lf",
1843
+ "Ġs oft",
1844
+ "m a",
1845
+ "uff y",
1846
+ "Ġke ep",
1847
+ "Ġb ed",
1848
+ "Ġlo st",
1849
+ "Ġtr uck",
1850
+ "Ġop en",
1851
+ "Ġwa rm",
1852
+ "Ġm ouse",
1853
+ "o on",
1854
+ "Ġat e",
1855
+ "I t",
1856
+ "f ore",
1857
+ "Ġbra ve",
1858
+ "s o",
1859
+ "Ġwat ch",
1860
+ "Ġ G",
1861
+ "Ġsurpr ised",
1862
+ "L et",
1863
+ "ec t",
1864
+ "Ġal so",
1865
+ "Ġf ell",
1866
+ "Ġgre en",
1867
+ "Ġfa ce",
1868
+ "Ġtw o",
1869
+ "ĠF l",
1870
+ "Ġunexp ected",
1871
+ "ĠK itt",
1872
+ "Y ou",
1873
+ "Ġpict ure",
1874
+ "Ġ âĢ",
1875
+ "v ent",
1876
+ "Ġno ise",
1877
+ "bb it",
1878
+ "g an",
1879
+ "ir st",
1880
+ "Ġw here",
1881
+ "l ed",
1882
+ "Ġthan ked",
1883
+ "Ġso on",
1884
+ "k es",
1885
+ "Ġsl ide",
1886
+ "Ġsm ile",
1887
+ "Ġho le",
1888
+ "Ġp us",
1889
+ "d ay",
1890
+ "Ġbe fore",
1891
+ "Ġra bbit",
1892
+ "ir ed",
1893
+ "Ġsa t",
1894
+ "Ġdo ll",
1895
+ "ĠFl uffy",
1896
+ "ir rel",
1897
+ "Ġke pt",
1898
+ "ĠâĢ ľ",
1899
+ "H i",
1900
+ "âĢ Ŀ",
1901
+ "is s",
1902
+ "ow l",
1903
+ "Ġf in",
1904
+ "e ar",
1905
+ "Ġsqu irrel",
1906
+ "Ġjump ed",
1907
+ "Ġbl o",
1908
+ "Ġu se",
1909
+ "a ble",
1910
+ "Ġst ore",
1911
+ "Ġhand s",
1912
+ "Ġbird s",
1913
+ "Ġb re",
1914
+ "Ġp ain",
1915
+ "Ġgo ing",
1916
+ "Ġfun ny"
1917
+ ]
1918
+ }
1919
+ }
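
The tokenizer above is a byte-level BPE with a compact vocabulary (ids 0–1023) plus `<|endoftext|>` added at id 1024. A sketch of exercising it directly with the `tokenizers` library, assuming a local copy of this tokenizer.json:

```python
from tokenizers import Tokenizer

# Assumes the tokenizer.json from this commit is available locally.
tok = Tokenizer.from_file("tokenizer.json")

enc = tok.encode("Once upon a time there was a little girl named Lily.")
print(enc.tokens)  # byte-level pieces such as "Ġupon", "Ġlittle", "ĠLily"
print(enc.ids)
```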
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
1
+ {
2
+ "added_tokens_decoder": {
3
+ "1024": {
4
+ "content": "<|endoftext|>",
5
+ "lstrip": false,
6
+ "normalized": true,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ }
11
+ },
12
+ "clean_up_tokenization_spaces": true,
13
+ "model_max_length": 1000000000000000019884624838656,
14
+ "tokenizer_class": "PreTrainedTokenizerFast"
15
+ }
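
Loaded through `transformers`, the fast tokenizer exposes the single added special token at id 1024; `model_max_length` is left at the library's "unbounded" sentinel, so the practical context limit comes from `n_positions` (2048) in config.json. A small check, again assuming the repo id used above:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("jonasknobloch/gpt2_u030_tiny-stories_1024_dpos")  # assumed repo id
print(tok.convert_tokens_to_ids("<|endoftext|>"))  # 1024
print(tok.model_max_length)                        # very large sentinel, i.e. effectively unset
```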
train_results.json ADDED
@@ -0,0 +1,9 @@
1
+ {
2
+ "epoch": 1.0,
3
+ "total_flos": 3.19615549046784e+17,
4
+ "train_loss": 1.5581016430754755,
5
+ "train_runtime": 7493.9423,
6
+ "train_samples": 611606,
7
+ "train_samples_per_second": 81.613,
8
+ "train_steps_per_second": 2.55
9
+ }
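
These throughput numbers are internally consistent with the trainer state below: 611,606 training samples at a batch size of 32 give 19,113 optimizer steps for one epoch, and dividing samples by the runtime reproduces the reported samples-per-second.

```python
import math

train_samples = 611_606
batch_size = 32            # train_batch_size from the README
train_runtime = 7493.9423  # seconds

print(math.ceil(train_samples / batch_size))  # 19113 steps, matching max_steps/global_step
print(train_samples / train_runtime)          # ~81.6, matching train_samples_per_second
```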
trainer_state.json ADDED
@@ -0,0 +1,479 @@
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 1.0,
5
+ "eval_steps": 1000,
6
+ "global_step": 19113,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.026160205096007954,
13
+ "grad_norm": 2.7528529167175293,
14
+ "learning_rate": 4.869198974519961e-05,
15
+ "loss": 4.403,
16
+ "step": 500
17
+ },
18
+ {
19
+ "epoch": 0.05232041019201591,
20
+ "grad_norm": 2.5874102115631104,
21
+ "learning_rate": 4.7383979490399206e-05,
22
+ "loss": 2.92,
23
+ "step": 1000
24
+ },
25
+ {
26
+ "epoch": 0.05232041019201591,
27
+ "eval_accuracy": 0.4470485610347474,
28
+ "eval_loss": 2.4565796852111816,
29
+ "eval_runtime": 53.659,
30
+ "eval_samples_per_second": 114.631,
31
+ "eval_steps_per_second": 3.597,
32
+ "step": 1000
33
+ },
34
+ {
35
+ "epoch": 0.07848061528802386,
36
+ "grad_norm": 2.103119134902954,
37
+ "learning_rate": 4.607596923559881e-05,
38
+ "loss": 2.2955,
39
+ "step": 1500
40
+ },
41
+ {
42
+ "epoch": 0.10464082038403182,
43
+ "grad_norm": 2.2540347576141357,
44
+ "learning_rate": 4.476795898079841e-05,
45
+ "loss": 1.9818,
46
+ "step": 2000
47
+ },
48
+ {
49
+ "epoch": 0.10464082038403182,
50
+ "eval_accuracy": 0.5672200738882789,
51
+ "eval_loss": 1.8089067935943604,
52
+ "eval_runtime": 53.7114,
53
+ "eval_samples_per_second": 114.52,
54
+ "eval_steps_per_second": 3.593,
55
+ "step": 2000
56
+ },
57
+ {
58
+ "epoch": 0.13080102548003977,
59
+ "grad_norm": 1.4521691799163818,
60
+ "learning_rate": 4.345994872599801e-05,
61
+ "loss": 1.83,
62
+ "step": 2500
63
+ },
64
+ {
65
+ "epoch": 0.15696123057604772,
66
+ "grad_norm": 1.3930741548538208,
67
+ "learning_rate": 4.2151938471197614e-05,
68
+ "loss": 1.7321,
69
+ "step": 3000
70
+ },
71
+ {
72
+ "epoch": 0.15696123057604772,
73
+ "eval_accuracy": 0.6020537553359386,
74
+ "eval_loss": 1.613312005996704,
75
+ "eval_runtime": 53.6022,
76
+ "eval_samples_per_second": 114.753,
77
+ "eval_steps_per_second": 3.601,
78
+ "step": 3000
79
+ },
80
+ {
81
+ "epoch": 0.18312143567205566,
82
+ "grad_norm": 1.4211032390594482,
83
+ "learning_rate": 4.084392821639722e-05,
84
+ "loss": 1.6624,
85
+ "step": 3500
86
+ },
87
+ {
88
+ "epoch": 0.20928164076806363,
89
+ "grad_norm": 1.275313138961792,
90
+ "learning_rate": 3.953591796159682e-05,
91
+ "loss": 1.6137,
92
+ "step": 4000
93
+ },
94
+ {
95
+ "epoch": 0.20928164076806363,
96
+ "eval_accuracy": 0.6195058763065014,
97
+ "eval_loss": 1.5170681476593018,
98
+ "eval_runtime": 53.3002,
99
+ "eval_samples_per_second": 115.403,
100
+ "eval_steps_per_second": 3.621,
101
+ "step": 4000
102
+ },
103
+ {
104
+ "epoch": 0.23544184586407158,
105
+ "grad_norm": 1.2738176584243774,
106
+ "learning_rate": 3.8227907706796424e-05,
107
+ "loss": 1.5721,
108
+ "step": 4500
109
+ },
110
+ {
111
+ "epoch": 0.26160205096007955,
112
+ "grad_norm": 1.2026641368865967,
113
+ "learning_rate": 3.691989745199603e-05,
114
+ "loss": 1.5353,
115
+ "step": 5000
116
+ },
117
+ {
118
+ "epoch": 0.26160205096007955,
119
+ "eval_accuracy": 0.6311744206133264,
120
+ "eval_loss": 1.4516141414642334,
121
+ "eval_runtime": 53.8002,
122
+ "eval_samples_per_second": 114.33,
123
+ "eval_steps_per_second": 3.587,
124
+ "step": 5000
125
+ },
126
+ {
127
+ "epoch": 0.28776225605608746,
128
+ "grad_norm": 1.1667968034744263,
129
+ "learning_rate": 3.561188719719563e-05,
130
+ "loss": 1.5091,
131
+ "step": 5500
132
+ },
133
+ {
134
+ "epoch": 0.31392246115209543,
135
+ "grad_norm": 1.4095014333724976,
136
+ "learning_rate": 3.430387694239523e-05,
137
+ "loss": 1.4845,
138
+ "step": 6000
139
+ },
140
+ {
141
+ "epoch": 0.31392246115209543,
142
+ "eval_accuracy": 0.6400102153795495,
143
+ "eval_loss": 1.4056029319763184,
144
+ "eval_runtime": 53.6423,
145
+ "eval_samples_per_second": 114.667,
146
+ "eval_steps_per_second": 3.598,
147
+ "step": 6000
148
+ },
149
+ {
150
+ "epoch": 0.3400826662481034,
151
+ "grad_norm": 1.2620683908462524,
152
+ "learning_rate": 3.299586668759483e-05,
153
+ "loss": 1.4606,
154
+ "step": 6500
155
+ },
156
+ {
157
+ "epoch": 0.3662428713441113,
158
+ "grad_norm": 1.2207545042037964,
159
+ "learning_rate": 3.168785643279443e-05,
160
+ "loss": 1.4443,
161
+ "step": 7000
162
+ },
163
+ {
164
+ "epoch": 0.3662428713441113,
165
+ "eval_accuracy": 0.646570593151532,
166
+ "eval_loss": 1.371826171875,
167
+ "eval_runtime": 53.6024,
168
+ "eval_samples_per_second": 114.752,
169
+ "eval_steps_per_second": 3.601,
170
+ "step": 7000
171
+ },
172
+ {
173
+ "epoch": 0.3924030764401193,
174
+ "grad_norm": 1.1445685625076294,
175
+ "learning_rate": 3.0379846177994036e-05,
176
+ "loss": 1.4255,
177
+ "step": 7500
178
+ },
179
+ {
180
+ "epoch": 0.41856328153612726,
181
+ "grad_norm": 1.1666637659072876,
182
+ "learning_rate": 2.907183592319364e-05,
183
+ "loss": 1.4118,
184
+ "step": 8000
185
+ },
186
+ {
187
+ "epoch": 0.41856328153612726,
188
+ "eval_accuracy": 0.6524860734404423,
189
+ "eval_loss": 1.3419677019119263,
190
+ "eval_runtime": 53.4631,
191
+ "eval_samples_per_second": 115.051,
192
+ "eval_steps_per_second": 3.61,
193
+ "step": 8000
194
+ },
195
+ {
196
+ "epoch": 0.4447234866321352,
197
+ "grad_norm": 1.2419095039367676,
198
+ "learning_rate": 2.776382566839324e-05,
199
+ "loss": 1.4015,
200
+ "step": 8500
201
+ },
202
+ {
203
+ "epoch": 0.47088369172814315,
204
+ "grad_norm": 1.1277480125427246,
205
+ "learning_rate": 2.6455815413592845e-05,
206
+ "loss": 1.3878,
207
+ "step": 9000
208
+ },
209
+ {
210
+ "epoch": 0.47088369172814315,
211
+ "eval_accuracy": 0.6569193066064805,
212
+ "eval_loss": 1.3188564777374268,
213
+ "eval_runtime": 53.2918,
214
+ "eval_samples_per_second": 115.421,
215
+ "eval_steps_per_second": 3.622,
216
+ "step": 9000
217
+ },
218
+ {
219
+ "epoch": 0.4970438968241511,
220
+ "grad_norm": 1.0969843864440918,
221
+ "learning_rate": 2.5147805158792447e-05,
222
+ "loss": 1.3773,
223
+ "step": 9500
224
+ },
225
+ {
226
+ "epoch": 0.5232041019201591,
227
+ "grad_norm": 1.1453758478164673,
228
+ "learning_rate": 2.383979490399205e-05,
229
+ "loss": 1.3661,
230
+ "step": 10000
231
+ },
232
+ {
233
+ "epoch": 0.5232041019201591,
234
+ "eval_accuracy": 0.6608493989564993,
235
+ "eval_loss": 1.2987806797027588,
236
+ "eval_runtime": 53.4371,
237
+ "eval_samples_per_second": 115.107,
238
+ "eval_steps_per_second": 3.612,
239
+ "step": 10000
240
+ },
241
+ {
242
+ "epoch": 0.549364307016167,
243
+ "grad_norm": 1.1494508981704712,
244
+ "learning_rate": 2.253178464919165e-05,
245
+ "loss": 1.3563,
246
+ "step": 10500
247
+ },
248
+ {
249
+ "epoch": 0.5755245121121749,
250
+ "grad_norm": 1.1540991067886353,
251
+ "learning_rate": 2.1223774394391253e-05,
252
+ "loss": 1.3485,
253
+ "step": 11000
254
+ },
255
+ {
256
+ "epoch": 0.5755245121121749,
257
+ "eval_accuracy": 0.6638841358556485,
258
+ "eval_loss": 1.2833938598632812,
259
+ "eval_runtime": 53.55,
260
+ "eval_samples_per_second": 114.865,
261
+ "eval_steps_per_second": 3.604,
262
+ "step": 11000
263
+ },
264
+ {
265
+ "epoch": 0.601684717208183,
266
+ "grad_norm": 1.1493678092956543,
267
+ "learning_rate": 1.9915764139590855e-05,
268
+ "loss": 1.3393,
269
+ "step": 11500
270
+ },
271
+ {
272
+ "epoch": 0.6278449223041909,
273
+ "grad_norm": 1.1292744874954224,
274
+ "learning_rate": 1.8607753884790457e-05,
275
+ "loss": 1.3326,
276
+ "step": 12000
277
+ },
278
+ {
279
+ "epoch": 0.6278449223041909,
280
+ "eval_accuracy": 0.6669099732330993,
281
+ "eval_loss": 1.2675042152404785,
282
+ "eval_runtime": 54.1653,
283
+ "eval_samples_per_second": 113.56,
284
+ "eval_steps_per_second": 3.563,
285
+ "step": 12000
286
+ },
287
+ {
288
+ "epoch": 0.6540051274001988,
289
+ "grad_norm": 1.1094636917114258,
290
+ "learning_rate": 1.729974362999006e-05,
291
+ "loss": 1.3249,
292
+ "step": 12500
293
+ },
294
+ {
295
+ "epoch": 0.6801653324962068,
296
+ "grad_norm": 1.1196491718292236,
297
+ "learning_rate": 1.5991733375189664e-05,
298
+ "loss": 1.319,
299
+ "step": 13000
300
+ },
301
+ {
302
+ "epoch": 0.6801653324962068,
303
+ "eval_accuracy": 0.6694385498356529,
304
+ "eval_loss": 1.2554900646209717,
305
+ "eval_runtime": 53.6683,
306
+ "eval_samples_per_second": 114.611,
307
+ "eval_steps_per_second": 3.596,
308
+ "step": 13000
309
+ },
310
+ {
311
+ "epoch": 0.7063255375922147,
312
+ "grad_norm": 1.1508065462112427,
313
+ "learning_rate": 1.4683723120389265e-05,
314
+ "loss": 1.3118,
315
+ "step": 13500
316
+ },
317
+ {
318
+ "epoch": 0.7324857426882226,
319
+ "grad_norm": 1.0860583782196045,
320
+ "learning_rate": 1.3375712865588865e-05,
321
+ "loss": 1.3068,
322
+ "step": 14000
323
+ },
324
+ {
325
+ "epoch": 0.7324857426882226,
326
+ "eval_accuracy": 0.6718871896629512,
327
+ "eval_loss": 1.2440038919448853,
328
+ "eval_runtime": 53.2979,
329
+ "eval_samples_per_second": 115.408,
330
+ "eval_steps_per_second": 3.621,
331
+ "step": 14000
332
+ },
333
+ {
334
+ "epoch": 0.7586459477842307,
335
+ "grad_norm": 1.126051902770996,
336
+ "learning_rate": 1.2067702610788469e-05,
337
+ "loss": 1.3014,
338
+ "step": 14500
339
+ },
340
+ {
341
+ "epoch": 0.7848061528802386,
342
+ "grad_norm": 1.100677490234375,
343
+ "learning_rate": 1.075969235598807e-05,
344
+ "loss": 1.2932,
345
+ "step": 15000
346
+ },
347
+ {
348
+ "epoch": 0.7848061528802386,
349
+ "eval_accuracy": 0.6737417864168825,
350
+ "eval_loss": 1.2349998950958252,
351
+ "eval_runtime": 53.6159,
352
+ "eval_samples_per_second": 114.723,
353
+ "eval_steps_per_second": 3.6,
354
+ "step": 15000
355
+ },
356
+ {
357
+ "epoch": 0.8109663579762465,
358
+ "grad_norm": 1.1026638746261597,
359
+ "learning_rate": 9.451682101187674e-06,
360
+ "loss": 1.2913,
361
+ "step": 15500
362
+ },
363
+ {
364
+ "epoch": 0.8371265630722545,
365
+ "grad_norm": 1.152256727218628,
366
+ "learning_rate": 8.143671846387275e-06,
367
+ "loss": 1.2868,
368
+ "step": 16000
369
+ },
370
+ {
371
+ "epoch": 0.8371265630722545,
372
+ "eval_accuracy": 0.6755100895943456,
373
+ "eval_loss": 1.2262605428695679,
374
+ "eval_runtime": 53.3362,
375
+ "eval_samples_per_second": 115.325,
376
+ "eval_steps_per_second": 3.619,
377
+ "step": 16000
378
+ },
379
+ {
380
+ "epoch": 0.8632867681682624,
381
+ "grad_norm": 1.1048023700714111,
382
+ "learning_rate": 6.835661591586878e-06,
383
+ "loss": 1.2809,
384
+ "step": 16500
385
+ },
386
+ {
387
+ "epoch": 0.8894469732642704,
388
+ "grad_norm": 1.1195833683013916,
389
+ "learning_rate": 5.527651336786481e-06,
390
+ "loss": 1.2791,
391
+ "step": 17000
392
+ },
393
+ {
394
+ "epoch": 0.8894469732642704,
395
+ "eval_accuracy": 0.6771291668633302,
396
+ "eval_loss": 1.219257116317749,
397
+ "eval_runtime": 53.5262,
398
+ "eval_samples_per_second": 114.916,
399
+ "eval_steps_per_second": 3.606,
400
+ "step": 17000
401
+ },
402
+ {
403
+ "epoch": 0.9156071783602784,
404
+ "grad_norm": 1.1175094842910767,
405
+ "learning_rate": 4.219641081986083e-06,
406
+ "loss": 1.2743,
407
+ "step": 17500
408
+ },
409
+ {
410
+ "epoch": 0.9417673834562863,
411
+ "grad_norm": 1.1169252395629883,
412
+ "learning_rate": 2.911630827185685e-06,
413
+ "loss": 1.2725,
414
+ "step": 18000
415
+ },
416
+ {
417
+ "epoch": 0.9417673834562863,
418
+ "eval_accuracy": 0.6780440694779302,
419
+ "eval_loss": 1.214146614074707,
420
+ "eval_runtime": 53.7753,
421
+ "eval_samples_per_second": 114.383,
422
+ "eval_steps_per_second": 3.589,
423
+ "step": 18000
424
+ },
425
+ {
426
+ "epoch": 0.9679275885522942,
427
+ "grad_norm": 1.1183288097381592,
428
+ "learning_rate": 1.6036205723852876e-06,
429
+ "loss": 1.2693,
430
+ "step": 18500
431
+ },
432
+ {
433
+ "epoch": 0.9940877936483022,
434
+ "grad_norm": 1.10493803024292,
435
+ "learning_rate": 2.956103175848899e-07,
436
+ "loss": 1.2711,
437
+ "step": 19000
438
+ },
439
+ {
440
+ "epoch": 0.9940877936483022,
441
+ "eval_accuracy": 0.6788121299845069,
442
+ "eval_loss": 1.2107646465301514,
443
+ "eval_runtime": 53.4355,
444
+ "eval_samples_per_second": 115.111,
445
+ "eval_steps_per_second": 3.612,
446
+ "step": 19000
447
+ },
448
+ {
449
+ "epoch": 1.0,
450
+ "step": 19113,
451
+ "total_flos": 3.19615549046784e+17,
452
+ "train_loss": 1.5581016430754755,
453
+ "train_runtime": 7493.9423,
454
+ "train_samples_per_second": 81.613,
455
+ "train_steps_per_second": 2.55
456
+ }
457
+ ],
458
+ "logging_steps": 500,
459
+ "max_steps": 19113,
460
+ "num_input_tokens_seen": 0,
461
+ "num_train_epochs": 1,
462
+ "save_steps": 1000,
463
+ "stateful_callbacks": {
464
+ "TrainerControl": {
465
+ "args": {
466
+ "should_epoch_stop": false,
467
+ "should_evaluate": false,
468
+ "should_log": false,
469
+ "should_save": true,
470
+ "should_training_stop": true
471
+ },
472
+ "attributes": {}
473
+ }
474
+ },
475
+ "total_flos": 3.19615549046784e+17,
476
+ "train_batch_size": 32,
477
+ "trial_name": null,
478
+ "trial_params": null
479
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e9b51e06a31bb1c9757b6e1a4500835749b8cf1c45052dfc464fd94dd0134833
3
+ size 5176