orionweller committed (verified)
Commit 114c389 · 1 Parent(s): fd7b432

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. train/dclm-filtered_sampled-ext/split_1021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  2. train/dclm-filtered_sampled-ext/split_10228-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  3. train/dclm-filtered_sampled-ext/split_10228-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  4. train/dclm-filtered_sampled-ext/split_10228-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  5. train/dclm-filtered_sampled-ext/split_10477-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  6. train/dclm-filtered_sampled-ext/split_10477-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  7. train/dclm-filtered_sampled-ext/split_10477-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  8. train/dclm-filtered_sampled-ext/split_10530-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  9. train/dclm-filtered_sampled-ext/split_10530-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  10. train/dclm-filtered_sampled-ext/split_10945-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  11. train/dclm-filtered_sampled-ext/split_113-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  12. train/dclm-filtered_sampled-ext/split_11429-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  13. train/dclm-filtered_sampled-ext/split_11429-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  14. train/dclm-filtered_sampled-ext/split_11429-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  15. train/dclm-filtered_sampled-ext/split_11641-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  16. train/dclm-filtered_sampled-ext/split_11641-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  17. train/dclm-filtered_sampled-ext/split_11961-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  18. train/dclm-filtered_sampled-ext/split_12227-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  19. train/dclm-filtered_sampled-ext/split_12227-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  20. train/dclm-filtered_sampled-ext/split_12227-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  21. train/dclm-filtered_sampled-ext/split_12342-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  22. train/dclm-filtered_sampled-ext/split_12342-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  23. train/dclm-filtered_sampled-ext/split_12379-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  24. train/dclm-filtered_sampled-ext/split_12482-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  25. train/dclm-filtered_sampled-ext/split_12482-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  26. train/dclm-filtered_sampled-ext/split_12482-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  27. train/dclm-filtered_sampled-ext/split_12482-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  28. train/dclm-filtered_sampled-ext/split_12571-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  29. train/dclm-filtered_sampled-ext/split_12571-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  30. train/dclm-filtered_sampled-ext/split_12706-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  31. train/dclm-filtered_sampled-ext/split_12706-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  32. train/dclm-filtered_sampled-ext/split_12727-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  33. train/dclm-filtered_sampled-ext/split_12727-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  34. train/dclm-filtered_sampled-ext/split_12727-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  35. train/dclm-filtered_sampled-ext/split_12727-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  36. train/dclm-filtered_sampled-ext/split_12776-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  37. train/dclm-filtered_sampled-ext/split_12776-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  38. train/dclm-filtered_sampled-ext/split_12776-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  39. train/dclm-filtered_sampled-ext/split_12862-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  40. train/dclm-filtered_sampled-ext/split_1287-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  41. train/dclm-filtered_sampled-ext/split_1287-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  42. train/dclm-filtered_sampled-ext/split_1287-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  43. train/dclm-filtered_sampled-ext/split_1302-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  44. train/dclm-filtered_sampled-ext/split_1302-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  45. train/dclm-filtered_sampled-ext/split_1302-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  46. train/dclm-filtered_sampled-ext/split_1302-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  47. train/dclm-filtered_sampled-ext/split_13266-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  48. train/dclm-filtered_sampled-ext/split_13266-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  49. train/dclm-filtered_sampled-ext/split_13374-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  50. train/dclm-filtered_sampled-ext/split_13374-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
train/dclm-filtered_sampled-ext/split_1021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:48b6c858ff413581393e290318f739e81b7db6004bf0363aee300e0bd63e2b4b
+ size 16720931
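The .mds shards in this commit are tracked with Git LFS, so the diff only shows pointer files (the version, oid, and size lines above) rather than the binary payloads. As a minimal, stdlib-only sketch (file paths and helper names here are hypothetical, not part of this repo), a downloaded shard can be checked against its pointer like this:

```python
# Hedged sketch: verify a downloaded .mds shard against its Git LFS pointer.
# Field names follow the "version / oid / size" pointer lines shown above.
import hashlib
from pathlib import Path

def parse_lfs_pointer(pointer_path: str) -> dict:
    """Parse the 'key value' lines of a Git LFS pointer file into a dict."""
    fields = {}
    for line in Path(pointer_path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

def verify_shard(shard_path: str, pointer_path: str) -> bool:
    """Check that the real shard matches the size and sha256 recorded in the pointer."""
    fields = parse_lfs_pointer(pointer_path)
    data = Path(shard_path).read_bytes()
    ok_size = len(data) == int(fields["size"])
    ok_hash = hashlib.sha256(data).hexdigest() == fields["oid"].removeprefix("sha256:")
    return ok_size and ok_hash
```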
train/dclm-filtered_sampled-ext/split_10228-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67097966, "hashes": {}}, "samples": 14179, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 28113290, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67106591, "hashes": {}}, "samples": 13914, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 28079765, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 11798797, "hashes": {}}, "samples": 2476, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 4938359, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_10228-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 35853037, "total_tokens_skipped": 14, "percentiles": {"0th": 38, "10th": 226, "20th": 337, "30th": 443, "40th": 562, "50th": 691, "60th": 860, "70th": 1093, "80th": 1491, "90th": 2510, "95th": 4187, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_10228-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_10477-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67086255, "hashes": {}}, "samples": 13661, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 27963860, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67097668, "hashes": {}}, "samples": 13446, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 28016528, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 16549313, "hashes": {}}, "samples": 3478, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6903551, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_10477-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 37030967, "total_tokens_skipped": 29, "percentiles": {"0th": 41, "10th": 227, "20th": 342, "30th": 452, "40th": 577, "50th": 717, "60th": 892, "70th": 1146, "80th": 1566, "90th": 2613, "95th": 4272, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_10477-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_10530-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f3d4b51d6c0d61fa7c11461712881b475ac1f41024186768fd7fc98b8368da5f
+ size 67105042
train/dclm-filtered_sampled-ext/split_10530-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cb897db9e5215df62b75d9d526bcd9de7deaacaebf8dbe081425fa0004e6de65
+ size 67090213
train/dclm-filtered_sampled-ext/split_10945-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:716299fcb0ac8b3723ad098be731aa2615affd22c42436648eb521035c2bffc6
+ size 15297463
train/dclm-filtered_sampled-ext/split_113-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2fe883d25d206a87686d7f472a9d097f2e15e84710102ccccf5c174e6dfe2161
+ size 22213902
train/dclm-filtered_sampled-ext/split_11429-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106862, "hashes": {}}, "samples": 12790, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 27809435, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67097137, "hashes": {}}, "samples": 12759, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 27775051, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 27592976, "hashes": {}}, "samples": 5245, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 11450877, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_11429-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 39779927, "total_tokens_skipped": 70, "percentiles": {"0th": 37, "10th": 223, "20th": 345, "30th": 464, "40th": 598, "50th": 741, "60th": 935, "70th": 1207, "80th": 1667, "90th": 2898, "95th": 4859, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_11429-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_11641-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a71c72047c29ef01a1feba8d756b973d18c3ef5667947be15aac6dfdbf84ae05
+ size 67104142
train/dclm-filtered_sampled-ext/split_11641-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:25c312601ada1ae57c5439d80343c43c3e59f095c6215cca12370cff6055dded
+ size 67103212
train/dclm-filtered_sampled-ext/split_11961-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c52bed91e9f4667bb4c2e48adb3b80db0512ab4fd4bac5b84e24d4167f566782
+ size 67105351
train/dclm-filtered_sampled-ext/split_12227-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107833, "hashes": {}}, "samples": 13868, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 27912969, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67102369, "hashes": {}}, "samples": 13687, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 27918089, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 14495754, "hashes": {}}, "samples": 2962, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6029543, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_12227-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 36492475, "total_tokens_skipped": 29, "percentiles": {"0th": 41, "10th": 233, "20th": 367, "30th": 496, "40th": 627, "50th": 768, "60th": 945, "70th": 1177, "80th": 1556, "90th": 2460, "95th": 3803, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_12227-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_12342-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3846712f42050632ffd96ba7d04fb5f859e3fa1d4bf0b4ac8b3e01a8530ae6bf
+ size 67107058
train/dclm-filtered_sampled-ext/split_12342-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:21838910bfe484e17817a551891ef83b11d296548455730d19a211ebc86cddb3
+ size 67103085
train/dclm-filtered_sampled-ext/split_12379-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:90104975d6cdae45ea4edbbae6f37f63112013a097d2c117c380904d7f18c7bb
+ size 16270254
train/dclm-filtered_sampled-ext/split_12482-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67098897, "hashes": {}}, "samples": 13125, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 27749704, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67085626, "hashes": {}}, "samples": 13027, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 27784711, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 24556602, "hashes": {}}, "samples": 4532, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 10166026, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_12482-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:90c47f11697f808cac3ba469593583c82f3f0adefb50f70234fe643431aa3873
+ size 24556602
train/dclm-filtered_sampled-ext/split_12482-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 39020165, "total_tokens_skipped": 136, "percentiles": {"0th": 35, "10th": 222, "20th": 338, "30th": 450, "40th": 577, "50th": 727, "60th": 916, "70th": 1190, "80th": 1662, "90th": 2859, "95th": 4804, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_12482-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_12571-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed266d4d8b30a6d5debd5c91f72f8bffb29655188dd78659b53c229109039099
+ size 67090771
train/dclm-filtered_sampled-ext/split_12571-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0257c86b7250aff0fbc7678ed62713aaa9a6e151be4f7ecaf8fea7329f6f88a
+ size 67101256
train/dclm-filtered_sampled-ext/split_12706-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14d469294a4c5c008ebd048f860f3a85166534596c978de8444ca350cfb0ac84
+ size 67104230
train/dclm-filtered_sampled-ext/split_12706-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01de09f636a5c5e9e3fe69911124ee276b70d8074b4fa40868ac168b4e8348f8
+ size 67100325
train/dclm-filtered_sampled-ext/split_12727-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108264, "hashes": {}}, "samples": 12847, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 27837020, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67100019, "hashes": {}}, "samples": 13146, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 27895393, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 22778515, "hashes": {}}, "samples": 4709, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9534498, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_12727-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:be1db02f9030ec5f5cac0bd0ab5479402509b57a9fb70e3028c630f07c27facd
+ size 22778515
train/dclm-filtered_sampled-ext/split_12727-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 38587226, "total_tokens_skipped": 128, "percentiles": {"0th": 34, "10th": 224, "20th": 341, "30th": 456, "40th": 587, "50th": 732, "60th": 913, "70th": 1168, "80th": 1605, "90th": 2764, "95th": 4689, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_12727-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_12776-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67102969, "hashes": {}}, "samples": 13689, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 28031533, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67103961, "hashes": {}}, "samples": 13385, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 27756062, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 18508572, "hashes": {}}, "samples": 3550, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7661891, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_12776-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 37521446, "total_tokens_skipped": 29, "percentiles": {"0th": 42, "10th": 222, "20th": 335, "30th": 445, "40th": 568, "50th": 708, "60th": 892, "70th": 1147, "80th": 1592, "90th": 2718, "95th": 4441, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_12776-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_12862-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eccab302732cf396bfcc209fad480824d75329559f5059b61ec85fc7ef892376
+ size 17561670
train/dclm-filtered_sampled-ext/split_1287-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67086814, "hashes": {}}, "samples": 12818, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 27735516, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67088673, "hashes": {}}, "samples": 12443, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 27769566, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 29412142, "hashes": {}}, "samples": 5480, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 12229021, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_1287-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 40230104, "total_tokens_skipped": 46, "percentiles": {"0th": 40, "10th": 228, "20th": 348, "30th": 466, "40th": 599, "50th": 746, "60th": 939, "70th": 1210, "80th": 1706, "90th": 2996, "95th": 4995, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_1287-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_1302-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107491, "hashes": {}}, "samples": 14145, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 28092518, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67108090, "hashes": {}}, "samples": 14082, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 28068440, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 11387326, "hashes": {}}, "samples": 2303, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 4771540, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_1302-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d4f6dc71d2d15f9eb23b601e5c558330b47d988e204a2b318f953e037ef427b
+ size 11387326
train/dclm-filtered_sampled-ext/split_1302-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 35750801, "total_tokens_skipped": 30, "percentiles": {"0th": 38, "10th": 221, "20th": 348, "30th": 464, "40th": 584, "50th": 719, "60th": 883, "70th": 1120, "80th": 1512, "90th": 2452, "95th": 3915, "99th": 8190, "100th": 8191}}
train/dclm-filtered_sampled-ext/split_1302-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/dclm-filtered_sampled-ext/split_13266-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e0a47df79473617ac19078a42f09be621377f3d804134a15a1714c4b68bf324
+ size 67104895
train/dclm-filtered_sampled-ext/split_13266-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb2939aed36ad0be741f8fc6644f5f9932b02e37c26ef38962d8722132df5420
+ size 67108180
train/dclm-filtered_sampled-ext/split_13374-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108470, "hashes": {}}, "samples": 12875, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 27795182, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67106668, "hashes": {}}, "samples": 13498, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 27877230, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 20182138, "hashes": {}}, "samples": 4181, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 8394078, "hashes": {}}}], "version": 2}
train/dclm-filtered_sampled-ext/split_13374-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c414d61523c9b3742580a3af2af714b64bfd3bedfbd5f99e491a1bf3cb8efcad
+ size 20182138