Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
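
The commit message above refers to the `upload_large_folder` helper in `huggingface_hub`, which performs resumable, multi-commit uploads of large local trees like this one. A minimal sketch of how such an upload is typically issued; the repository id and folder path below are illustrative placeholders, not values taken from this diff:

```python
from huggingface_hub import HfApi

api = HfApi()

# Resumable upload of a large local folder to a dataset repository.
# "org/your-dataset" and "./train" are placeholders for illustration only.
api.upload_large_folder(
    repo_id="org/your-dataset",
    repo_type="dataset",
    folder_path="./train",
)
```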
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00005-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00014-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00027-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00030-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00030-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00030-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00030-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00033-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00034-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00043-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00052-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
- train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00052-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
- train/fineweb2-sampled-decay-v2/ind_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00035-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
- train/fineweb2-sampled-decay-v2/ind_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00035-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
- train/fineweb2-sampled-decay-v2/ind_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00087-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0017-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0017-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0017-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0022-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0022-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0022-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0033-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0033-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0033-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0079-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0079-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0079-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0127-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
- train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0127-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67103980, "hashes": {}}, "samples": 21287, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 5684235, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67082218, "hashes": {}}, "samples": 21070, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6249475, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67108338, "hashes": {}}, "samples": 18982, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7298514, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67082616, "hashes": {}}, "samples": 17084, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 7933143, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 39350255, "hashes": {}}, "samples": 10511, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 3677122, "hashes": {}}}], "version": 2}
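
Each `index.json` in this upload follows the MDS shard-index layout shown above: a `shards` list whose entries declare the column names and encodings (`id` as `str`, `input_ids` as `ndarray:uint32`), the raw and zstd-compressed shard sizes, and the per-shard sample counts. A minimal sketch, assuming one of these directories has been downloaded locally, that totals samples and bytes from such an index:

```python
import json

# Placeholder path: any of the *-tokenized-chunked-* directories in this diff.
index_path = "000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json"

with open(index_path) as f:
    index = json.load(f)

shards = index["shards"]
total_samples = sum(s["samples"] for s in shards)
raw_mb = sum(s["raw_data"]["bytes"] for s in shards) / 1e6
zip_mb = sum(s["zip_data"]["bytes"] for s in shards) / 1e6
print(f"{len(shards)} shards, {total_samples} samples, "
      f"{raw_mb:.1f} MB raw, {zip_mb:.1f} MB zstd-compressed")
```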
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:119fa5618481cbf939087356afdcdc2003d6b2c1af30ceb3ca0504ab554f9fd4
size 67100039
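
The `shard.*.mds` entries in this diff are stored as Git LFS pointers like the three lines above: each pointer records only the spec version, the SHA-256 (`oid`) of the real shard, and its size in bytes. A minimal sketch, assuming the actual shard has already been fetched alongside its pointer, that verifies the download against those two fields; the filenames are placeholders:

```python
import hashlib

def verify_lfs_object(pointer_path: str, object_path: str) -> bool:
    """Compare a downloaded file against the oid/size recorded in a Git LFS pointer."""
    fields = dict(line.split(" ", 1) for line in open(pointer_path).read().splitlines() if line)
    expected_oid = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    actual_size = 0
    with open(object_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            actual_size += len(chunk)
    return digest.hexdigest() == expected_oid and actual_size == expected_size

# Placeholder filenames for illustration.
print(verify_lfs_object("shard.00001.mds.pointer", "shard.00001.mds"))
```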
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:6358317234472b6acea4cab2bfeafa03e97f42f3ac99d81c9d5f66898e09e3e3
size 32382722
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00005-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0def5e0ae1e1bad6122041ff091836847b2cb2bba304da68b6bc5a7e1eb4805e
size 49670599
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ca37138dca82eb794a4cb0306dfc37b3df19ea4b05f497b44f7231c3d9f2d68c
size 27635554
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00014-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4b5a406f82f4e8a9e22959be674a52f325f023483ed41d946caab2b0fd9dda96
size 53963598
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67099865, "hashes": {}}, "samples": 20802, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6614020, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67108642, "hashes": {}}, "samples": 18038, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8191953, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67107454, "hashes": {}}, "samples": 17861, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7200808, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67105556, "hashes": {}}, "samples": 21923, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 6530510, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 15879521, "hashes": {}}, "samples": 5266, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 1873728, "hashes": {}}}], "version": 2}
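
Because the indexes declare `"format": "mds"` and `"version": 2`, these directories should be loadable with the `mosaicml-streaming` library, which reads `index.json` together with its `shard.*.mds` files. A hedged sketch under that assumption; the local path is a placeholder, and the field names `id` / `input_ids` come from the column declarations above:

```python
from streaming import StreamingDataset  # pip install mosaicml-streaming

# Point `local` at a directory that contains index.json and its shard.*.mds files.
dataset = StreamingDataset(
    local="000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups",
    shuffle=False,
)

sample = dataset[0]
print(sample["id"], len(sample["input_ids"]))  # document id and its token count
```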
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:486cf056ff15f23ebdb1779443ccf7cf4c230ad1efd28634e12d59c921630c54
size 15879521
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
{"total_duplicated_tokens": 0, "total_tokens_written": 69739844, "total_tokens_skipped": 120, "percentiles": {"0th": 69, "10th": 112, "20th": 147, "30th": 220, "40th": 310, "50th": 393, "60th": 497, "70th": 680, "80th": 1009, "90th": 1793, "95th": 3111, "99th": 8190, "100th": 8191}}
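
Each `stats.json` records per-batch tokenization counters (`total_tokens_written`, `total_tokens_skipped`, `total_duplicated_tokens`) alongside sequence-length percentiles. A minimal sketch, assuming a language split has been downloaded locally, that aggregates the token counts across its batches; the root path is a placeholder:

```python
import json
from pathlib import Path

# Placeholder root: a downloaded copy of one language split from this dataset.
root = Path("dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled")

written = skipped = n_batches = 0
for stats_file in sorted(root.glob("*/stats.json")):
    stats = json.loads(stats_file.read_text())
    written += stats["total_tokens_written"]
    skipped += stats["total_tokens_skipped"]
    n_batches += 1

print(f"{written:,} tokens written, {skipped:,} skipped across {n_batches} batches")
```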
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5644629ebd013f7f498f68baa5e34581489517b21f3e725d96cd014504c8ad8c
size 47886519
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b732f600e0ff8390f7a8a05cf51acb80011de60f80ab74dfd10e4f54d57248c7
size 37520892
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7a0fa64bee43670f7d45eca7d209f56da3a9380b3204f0145e9de07b40255492
size 17661492
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00026-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ec7b6e41f4658223d326721f31738c1a6fbb23be969d20e2c7a11fcb0c909a78
size 37018390
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00027-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8628f03e6e8652ce65d42a2d2fa7320153f0369d204b15d70ea9b7c690183a70
size 46162182
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:9dacd39677db326075192efdc556c1cafcdfb21b20adada0fb7a5b77c4076413
size 44069078
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00030-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108451, "hashes": {}}, "samples": 17682, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9200716, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67100806, "hashes": {}}, "samples": 20721, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6994630, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67108637, "hashes": {}}, "samples": 17931, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9116293, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67105193, "hashes": {}}, "samples": 18518, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 7182238, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 412116, "hashes": {}}, "samples": 203, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 59393, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00030-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:1807db18560c9fd13d3c3cb0f40acc7d4e6870ec525c0c3eb4005e24863fba63
size 412116
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00030-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
{"total_duplicated_tokens": 0, "total_tokens_written": 66013869, "total_tokens_skipped": 155, "percentiles": {"0th": 68, "10th": 112, "20th": 151, "30th": 225, "40th": 317, "50th": 401, "60th": 510, "70th": 696, "80th": 1041, "90th": 1908, "95th": 3533, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00030-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00033-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:71af359c1b6a8d3816f804de060264dd16379763225cc9f9e8accb8d0b42e46b
size 27170511
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00034-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2e6b2cb0a4a0948e9ce180062b599b7381adfd10187eddcbcf3998337bd7cc62
size 39908929
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00043-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ace36992d0678c6742009ec254be979c2d68630c028607b377a3eae0c84d8707
size 39465326
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e0a3267cd1db994d5abf196427e372102c50fb0f414e4326d46029070fb91341
size 51731049
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:8110c77981142d292c14253da04f79057d5f38155896f0914cc6ca42a95047fc
size 67102448
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0756d90101eca12c98d99c699734b21b888e14f6e493debee725854f693c7ec1
size 67103768
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:91539882a0e759eb9f15a2a3a43ad751dae97dde19be322455e962e417dee837
size 59476673
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00052-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:ba52481f3609065ffcadcb6c4268fc8206116491c1ac796c56a39b610e4c24da
size 67100624
train/fineweb2-sampled-decay-v2/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00052-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:dfeaaf87d3413e171b839b009aea8f9092c88965c1bb40093fa187bfd046f52f
size 38328679
train/fineweb2-sampled-decay-v2/ind_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00035-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5743fc0d8772f575a302b3320f4e5d4ab20c7d9fe7a8b40dfa48ccc4ee2ebc6f
size 67108603
train/fineweb2-sampled-decay-v2/ind_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00035-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fe26c98a1b36c0e36cbbdec5606574c13545a030eb867629b7181027e2a26794
size 67107688
train/fineweb2-sampled-decay-v2/ind_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00087-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:067a1b52ee842cf8dec4c7a60a31cf8518c264e2691836eb4261c6125b806640
size 67107130
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67094730, "hashes": {}}, "samples": 11688, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 16145754, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 55084849, "hashes": {}}, "samples": 9309, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13893987, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
{"total_duplicated_tokens": 0, "total_tokens_written": 30209537, "total_tokens_skipped": 51, "percentiles": {"0th": 101, "10th": 223, "20th": 320, "30th": 423, "40th": 557, "50th": 710, "60th": 913, "70th": 1221, "80th": 1791, "90th": 3577, "95th": 7495, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0017-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67089484, "hashes": {}}, "samples": 11401, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 16877683, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 53482826, "hashes": {}}, "samples": 9557, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13028740, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0017-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
{"total_duplicated_tokens": 0, "total_tokens_written": 29808388, "total_tokens_skipped": 146, "percentiles": {"0th": 100, "10th": 217, "20th": 306, "30th": 416, "40th": 546, "50th": 703, "60th": 914, "70th": 1217, "80th": 1792, "90th": 3469, "95th": 7312, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0017-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0022-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107323, "hashes": {}}, "samples": 11628, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 17209317, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 55515206, "hashes": {}}, "samples": 9305, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14394445, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0022-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
{"total_duplicated_tokens": 0, "total_tokens_written": 30321319, "total_tokens_skipped": 74, "percentiles": {"0th": 91, "10th": 220, "20th": 316, "30th": 428, "40th": 562, "50th": 716, "60th": 935, "70th": 1249, "80th": 1820, "90th": 3627, "95th": 7353, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0022-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0033-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67098300, "hashes": {}}, "samples": 11350, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 17613214, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 53952342, "hashes": {}}, "samples": 9554, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14160700, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0033-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
{"total_duplicated_tokens": 0, "total_tokens_written": 29928807, "total_tokens_skipped": 87, "percentiles": {"0th": 90, "10th": 222, "20th": 318, "30th": 419, "40th": 541, "50th": 709, "60th": 916, "70th": 1237, "80th": 1804, "90th": 3546, "95th": 7113, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0033-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0079-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108084, "hashes": {}}, "samples": 10109, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20207181, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67104494, "hashes": {}}, "samples": 10457, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 19571285, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 4934916, "hashes": {}}, "samples": 623, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 1519719, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0079-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
{"total_duplicated_tokens": 0, "total_tokens_written": 34448417, "total_tokens_skipped": 86, "percentiles": {"0th": 88, "10th": 222, "20th": 323, "30th": 441, "40th": 578, "50th": 750, "60th": 1007, "70th": 1378, "80th": 2136, "90th": 4912, "95th": 8190, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0079-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json
ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0127-tokenized-chunked-8192-512-32-backfill-nodups/index.json
ADDED
@@ -0,0 +1 @@
{"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67104427, "hashes": {}}, "samples": 13209, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12768550, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 39348970, "hashes": {}}, "samples": 7468, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7813185, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/lvs_Latn_train-sampled/batch_0127-tokenized-chunked-8192-512-32-backfill-nodups/stats.json
ADDED
@@ -0,0 +1 @@
{"total_duplicated_tokens": 0, "total_tokens_written": 26283156, "total_tokens_skipped": 78, "percentiles": {"0th": 94, "10th": 217, "20th": 311, "30th": 405, "40th": 529, "50th": 675, "60th": 861, "70th": 1126, "80th": 1592, "90th": 2877, "95th": 5332, "99th": 8191, "100th": 8191}}