Add files using upload-large-folder tool
This view is limited to 50 files because the commit contains too many changes.
- train/books-gutenberg-dup-sampled-decay/shard_00003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
- train/books-gutenberg-dup-sampled-decay/shard_00005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
- train/books-gutenberg-dup-sampled-decay/shard_00005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
- train/books-gutenberg-dup-sampled-decay/shard_00005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00009.mds +3 -0
- train/books-gutenberg-dup-sampled-decay/shard_00008-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
- train/books-gutenberg-dup-sampled-decay/shard_00008-tokenized-chunked-8192-512-32-backfill-nodups/shard.00008.mds +3 -0
- train/books-gutenberg-dup-sampled-decay/shard_00008-tokenized-chunked-8192-512-32-backfill-nodups/shard.00011.mds +3 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_104-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_104-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_104-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_114-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_114-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_114-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_12-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_12-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_12-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_14-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_14-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_14-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_25-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_38-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_38-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_38-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_41-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_41-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_41-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_47-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_47-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_47-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_49-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_49-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_49-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_51-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_51-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_51-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_54-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_54-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_54-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_56-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_56-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_56-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
train/books-gutenberg-dup-sampled-decay/shard_00003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3b80d8269383a3e8d6c019c4bb288fe9a94c3984cdc8ddd6b3f396c1ebaae622
+size 67100919
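Note: each .mds shard in this commit is stored as a Git LFS pointer, so the three added lines per file are the pointer fields (spec version, the sha256 oid of the real payload, and its size in bytes) rather than the binary data itself. As an illustrative sketch only (the file paths below are hypothetical and not part of this commit), a downloaded shard can be checked against its pointer using nothing but the standard library:

```python
# Sketch: verify a downloaded .mds shard against its Git LFS pointer file.
# Paths are hypothetical; adjust to wherever the pointer and payload live.
import hashlib
from pathlib import Path


def parse_lfs_pointer(pointer_path: Path) -> dict:
    """Parse the key/value lines of a Git LFS pointer (version, oid, size)."""
    fields = {}
    for line in pointer_path.read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields


def verify(pointer_path: Path, payload_path: Path) -> bool:
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    digest = hashlib.sha256(payload_path.read_bytes()).hexdigest()
    return digest == expected_oid and payload_path.stat().st_size == expected_size


if __name__ == "__main__":
    ok = verify(Path("shard.00001.mds.pointer"), Path("shard.00001.mds"))
    print("match" if ok else "mismatch")
```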
train/books-gutenberg-dup-sampled-decay/shard_00005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bb968e65cd388a98ff977b12f44a8b2844e13619fcb9c4a1affcb176b85a8ec3
+size 67097165
train/books-gutenberg-dup-sampled-decay/shard_00005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dca26af7ca08b0571bb6984bb71bbfb0531249b93ea2ce90f48db61d555b5f6c
+size 67105366
train/books-gutenberg-dup-sampled-decay/shard_00005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00009.mds
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eb101496aea573abf5aafbc97b45414a055b942b3a1471fac3a59c37f52d85fc
+size 67103625
train/books-gutenberg-dup-sampled-decay/shard_00008-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:004011d73f335e87c464b027c3a80dc7d8666843a2052f5d68da12dfdc11aa15
+size 67102599
train/books-gutenberg-dup-sampled-decay/shard_00008-tokenized-chunked-8192-512-32-backfill-nodups/shard.00008.mds
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:db3bf6115139fb7d422a55f9cfd9724e25358e41af8735291c06e56405077625
+size 67076298
train/books-gutenberg-dup-sampled-decay/shard_00008-tokenized-chunked-8192-512-32-backfill-nodups/shard.00011.mds
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3da4f5db8475e12ccc5ae486e2a917b53efdb66406c5f03edc6c0e2491aa09ff
+size 67087369
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_104-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26767896, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9915535, "hashes": {}}}], "version": 2}
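Note: each index.json follows the MosaicML Streaming MDS shard-index layout (format "mds", version 2). It declares the column names and encodings (id as str, input_ids as ndarray:uint32), the sample count, and the raw versus zstd-compressed shard sizes, so an MDS-aware reader such as streaming.StreamingDataset can consume the directory. A minimal standard-library sketch for inspecting one such index follows; the directory path is taken from the entry above and the usage is purely illustrative:

```python
# Sketch: inspect the MDS index.json of one tokenized shard directory.
# Uses only the standard library; assumes the directory has been downloaded locally.
import json
from pathlib import Path

shard_dir = Path(
    "train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/"
    "articles_104-tokenized-chunked-8192-512-32-backfill-nodups"
)
index = json.loads((shard_dir / "index.json").read_text())

for shard in index["shards"]:
    columns = dict(zip(shard["column_names"], shard["column_encodings"]))
    print(columns)                       # {'id': 'str', 'input_ids': 'ndarray:uint32'}
    print(shard["samples"])              # 50000 sequences in this shard
    print(shard["raw_data"]["bytes"])    # uncompressed MDS size
    print(shard["zip_data"]["bytes"])    # zstd-compressed size actually stored
```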
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_104-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6315736, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 45, "20th": 54, "30th": 64, "40th": 76, "50th": 90, "60th": 108, "70th": 134, "80th": 173, "90th": 249, "95th": 336, "99th": 565, "100th": 4172}}
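Note: each stats.json summarizes the tokenization run for its directory: total tokens written, skipped, and duplicated, plus sequence-length percentiles. A small aggregation sketch (standard library only; the root path is hypothetical and assumes the shard directories have been downloaded):

```python
# Sketch: sum token counts across all stats.json files under one split.
import json
from pathlib import Path

root = Path("train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups")

total_written = total_skipped = 0
for stats_path in sorted(root.glob("*/stats.json")):
    stats = json.loads(stats_path.read_text())
    total_written += stats["total_tokens_written"]
    total_skipped += stats["total_tokens_skipped"]
    # The percentiles describe the sequence-length distribution, e.g. the median:
    median_len = stats["percentiles"]["50th"]
    print(f"{stats_path.parent.name}: median sequence length {median_len}")

print(f"tokens written: {total_written:,}; skipped: {total_skipped:,}")
```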
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_104-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_114-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 27802020, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10369237, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_114-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6574107, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 44, "20th": 53, "30th": 64, "40th": 76, "50th": 91, "60th": 111, "70th": 138, "80th": 181, "90th": 265, "95th": 362, "99th": 598, "100th": 5964}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_114-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_12-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 27415366, "hashes": {}}, "samples": 50002, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10263398, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_12-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6489998, "total_tokens_skipped": 0, "percentiles": {"0th": 21, "10th": 44, "20th": 54, "30th": 65, "40th": 77, "50th": 92, "60th": 110, "70th": 138, "80th": 178, "90th": 258, "95th": 348, "99th": 569, "100th": 8190}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_12-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26274883, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9694424, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6204997, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 44, "20th": 52, "30th": 62, "40th": 74, "50th": 88, "60th": 106, "70th": 132, "80th": 170, "90th": 246, "95th": 333, "99th": 563, "100th": 5217}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_14-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 28064789, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10592204, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_14-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6652239, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 44, "20th": 55, "30th": 66, "40th": 79, "50th": 94, "60th": 114, "70th": 142, "80th": 184, "90th": 272, "95th": 363, "99th": 594, "100th": 3592}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_14-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26547056, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9792978, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6273013, "total_tokens_skipped": 0, "percentiles": {"0th": 21, "10th": 44, "20th": 53, "30th": 63, "40th": 74, "50th": 89, "60th": 107, "70th": 132, "80th": 172, "90th": 250, "95th": 339, "99th": 550, "100th": 7348}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_18-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_25-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_38-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26831405, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9935109, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_38-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6331557, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 44, "20th": 53, "30th": 64, "40th": 76, "50th": 90, "60th": 108, "70th": 134, "80th": 174, "90th": 254, "95th": 341, "99th": 561, "100th": 3989}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_38-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 27626107, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10217563, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6542587, "total_tokens_skipped": 0, "percentiles": {"0th": 18, "10th": 44, "20th": 53, "30th": 64, "40th": 76, "50th": 90, "60th": 111, "70th": 139, "80th": 182, "90th": 269, "95th": 361, "99th": 588, "100th": 7047}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_41-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26700933, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9881827, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_41-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6298965, "total_tokens_skipped": 0, "percentiles": {"0th": 21, "10th": 44, "20th": 53, "30th": 63, "40th": 74, "50th": 88, "60th": 107, "70th": 133, "80th": 172, "90th": 252, "95th": 340, "99th": 579, "100th": 5022}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_41-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_47-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26349011, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9791320, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_47-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6211045, "total_tokens_skipped": 0, "percentiles": {"0th": 21, "10th": 43, "20th": 52, "30th": 63, "40th": 74, "50th": 88, "60th": 107, "70th": 132, "80th": 171, "90th": 247, "95th": 331, "99th": 556, "100th": 5248}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_47-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_49-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 27382926, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10213183, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_49-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6469385, "total_tokens_skipped": 0, "percentiles": {"0th": 19, "10th": 44, "20th": 54, "30th": 65, "40th": 76, "50th": 91, "60th": 111, "70th": 138, "80th": 179, "90th": 259, "95th": 349, "99th": 573, "100th": 3998}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_49-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_51-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 28272251, "hashes": {}}, "samples": 50001, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10614176, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_51-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6691599, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 45, "20th": 54, "30th": 65, "40th": 77, "50th": 93, "60th": 113, "70th": 141, "80th": 186, "90th": 270, "95th": 367, "99th": 598, "100th": 8190}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_51-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_54-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 27102140, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10070605, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_54-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6399194, "total_tokens_skipped": 0, "percentiles": {"0th": 17, "10th": 44, "20th": 53, "30th": 64, "40th": 75, "50th": 90, "60th": 109, "70th": 135, "80th": 176, "90th": 259, "95th": 349, "99th": 575, "100th": 3708}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_54-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_56-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
+{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 26792930, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10023257, "hashes": {}}}], "version": 2}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_56-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
+{"total_duplicated_tokens": 0, "total_tokens_written": 6321971, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 44, "20th": 54, "30th": 64, "40th": 75, "50th": 90, "60th": 109, "70th": 134, "80th": 173, "90th": 251, "95th": 338, "99th": 558, "100th": 5141}}
train/multi-wikis/eng_Latn-tokenized-chunked-8192-512-32-backfill-nodups/articles_56-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.