Commit 71f96b2 (verified), committed by orionweller
1 Parent(s): 5ba2ed0

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the complete list.
Files changed (50)
  1. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  3. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  4. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  5. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  6. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  7. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  8. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  9. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  10. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  11. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  12. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  13. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  14. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  15. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  16. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  17. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  18. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  19. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  20. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  21. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  22. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  23. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  24. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  25. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  26. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  27. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  28. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  29. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  30. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  31. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  32. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
  33. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  34. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  35. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  36. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  37. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  38. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  39. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  40. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00023-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  41. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00023-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  42. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  43. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  44. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  45. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  46. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00039-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  47. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  48. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  49. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  50. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
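All of the listed directories follow the MosaicML Streaming (MDS) layout: each contains an index.json describing its shards plus the shard.XXXXX.mds files themselves. As a minimal sketch (not part of this commit), one such directory could be read locally with the mosaicml-streaming package once its shards are downloaded; the path below is copied from the first entry above and the rest is illustrative:

    # Sketch: iterate one tokenized MDS split with mosaicml-streaming (pip install mosaicml-streaming).
    from streaming import StreamingDataset

    local_dir = (
        "train/fineweb2-sampled-decay-v2/"
        "ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/"
        "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups"
    )
    dataset = StreamingDataset(local=local_dir, shuffle=False)
    sample = next(iter(dataset))
    print(sample["id"], len(sample["input_ids"]))  # columns declared in each index.json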
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107947, "hashes": {}}, "samples": 27256, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 7323348, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67089491, "hashes": {}}, "samples": 7996, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6399377, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67082665, "hashes": {}}, "samples": 9664, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6198657, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67090885, "hashes": {}}, "samples": 25619, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 7133640, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 27049793, "hashes": {}}, "samples": 9651, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 2638377, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6967bbaab4489f0ea0a9759bf8d85f93e7a5176c71373f6407d7805f7072315
+ size 27049793
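The .mds shards themselves are tracked with Git LFS, so the diff shows only pointer files: a spec version line, the object's sha256 oid, and its size in bytes. A hedged sketch of verifying a downloaded shard against such a pointer (both file names are assumed, not taken from this commit):

    import hashlib
    from pathlib import Path

    # Sketch: check a downloaded object against its Git LFS pointer fields.
    fields = dict(
        line.split(" ", 1)
        for line in Path("shard.00004.mds.pointer").read_text().splitlines()
        if line.strip()
    )
    blob = Path("shard.00004.mds").read_bytes()
    assert len(blob) == int(fields["size"])
    assert hashlib.sha256(blob).hexdigest() == fields["oid"].removeprefix("sha256:")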
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 72580202, "total_tokens_skipped": 6, "percentiles": {"0th": 82, "10th": 130, "20th": 162, "30th": 201, "40th": 249, "50th": 316, "60th": 415, "70th": 572, "80th": 867, "90th": 1798, "95th": 5081, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91efe8937497f44d4b0930c94af0d6bf7147435a889dc4c357034256430db393
+ size 60313877
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108217, "hashes": {}}, "samples": 30813, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 7760506, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67106953, "hashes": {}}, "samples": 12937, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6672258, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67108766, "hashes": {}}, "samples": 11214, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6338977, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 44809906, "hashes": {}}, "samples": 21030, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 5526425, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b8aecc18e694798969901eee04e9952ab436f8058a1931a1d77c4e2e2b69e04
+ size 44809906
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b81d0d0377dbd034cab12f67afe0cf754bd75a37ecc53bc1c85883d85e060c90
+ size 36647674
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107695, "hashes": {}}, "samples": 26713, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 15216105, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 27010800, "hashes": {}}, "samples": 10680, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6279699, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c4634e78a7c87eff301c80ff224972e150a64846541b4a7883d4355447085ccc
+ size 27010800
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 22935221, "total_tokens_skipped": 11, "percentiles": {"0th": 77, "10th": 131, "20th": 160, "30th": 196, "40th": 242, "50th": 301, "60th": 382, "70th": 506, "80th": 722, "90th": 1185, "95th": 1962, "99th": 7800, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0548e773183d72788c8d40bcbf99cb4ef265d97f22ace1db98f5b716cf684a30
+ size 32112520
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:986cf62d9bff2f3adceb4709b1653d659378e7a7ca57c5faf471a8a91f3896c9
+ size 19071668
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108282, "hashes": {}}, "samples": 27122, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 16704394, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 15332332, "hashes": {}}, "samples": 5985, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 3923952, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0bce2db15d4ede252865bebf7393bb0c0e0363a9735fdd3b4d7d4c883afbf39b
+ size 15332332
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 20083953, "total_tokens_skipped": 88, "percentiles": {"0th": 77, "10th": 132, "20th": 161, "30th": 194, "40th": 237, "50th": 296, "60th": 384, "70th": 519, "80th": 739, "90th": 1189, "95th": 1935, "99th": 6831, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14e62a7bd4852bb446fd20356c157d629083ff0998a4d8b0314350b288a0b1e2
+ size 11926963
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67085812, "hashes": {}}, "samples": 24483, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 19958519, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 7390688, "hashes": {}}, "samples": 2700, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 2177160, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:53598cca120e02ee109f21f83d0f31c8e85480283367ccc6bad024c0d70589fa
+ size 7390688
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 18186934, "total_tokens_skipped": 26, "percentiles": {"0th": 77, "10th": 134, "20th": 164, "30th": 200, "40th": 246, "50th": 307, "60th": 399, "70th": 538, "80th": 765, "90th": 1273, "95th": 2113, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67100272, "hashes": {}}, "samples": 24154, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 20698447, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 3488957, "hashes": {}}, "samples": 1345, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 1163445, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75ccebac9d75fc7d5aa666cf85248b2dbb09f290aa4bccbb61acb7d0e9042c57
+ size 3488957
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 17241832, "total_tokens_skipped": 0, "percentiles": {"0th": 78, "10th": 136, "20th": 167, "30th": 205, "40th": 251, "50th": 318, "60th": 415, "70th": 556, "80th": 786, "90th": 1255, "95th": 2106, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:31fbda5bf3b37391ee00e88310538c05d775047215077e9607df454d57f3885f
+ size 67107519
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c2bb50dc0f559a1c86172f6c431fd336f595c2081e1956cd262ed9fb2683e19f
+ size 67095372
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c58f63029e1df09d063c7e916df0a6f67380c88de9679f6c5a37847cb74092c
+ size 67108797
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0cc26fe2ce1e880b18a1e36db3ca09c78f1b02497089520dc1b7c4cbc8f31ef3
+ size 57923388
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e02708b2cadc424e0248a38c06d09eace3b7db1cfc9a3cc2c1aa5f6a7ff464e7
+ size 67086592
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:95e4705aa127facfe7296174d50d624f9602e792ddf25c20636c647bdc834d40
+ size 67106965
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a929bf54d5f984f7327e7418d5e374809eb441ee005829360ad439a618722808
+ size 67095557
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63f1719172c6d0344629211774168041b5b1b7b9e3418cb4d5fd05f34ad1ad1f
+ size 67108537
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dcdcefd33710a52afc94fa8c99461e0f586eda84d6bad77e59a1c37810ce4cd7
+ size 67106753
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:21ba2148baa9b2cf1bf536ce038600dc3af1ab419b27c31a48918c9dc110692a
+ size 67105159
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9beaa4de37c1461e32bc0877949459eada2bd275c4785583652837e7f5c5f302
+ size 67081492
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00023-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2169c6ffd8b523b5fff28f2229c00816982efa0d6d40f11614794c2e5f8215da
+ size 67107892
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00023-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35c494a43c834850d40fcfa8cb26d9e5af9e82054712af7b50124746210f0c11
+ size 67104421
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cc4e6226eb6097c6948fcc1ecce32ddaaa209c3b9cfd0265d920a46f81f92606
+ size 67102496
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:902f2d99d18058514cfb30e15dc8a2f374e8d2b33ea6b400c5ec839ba69c0349
+ size 67105518
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2b9806486fcf4c8f0e3bb5a1e472619c647c46cfe16612551f863a3b61a34e47
+ size 67108426
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37f3177e8ae074bb8b2f4316787be57e5c93f9e035c4d808458b2c1214b4802a
+ size 67089713
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00039-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28c2a0a1c0bee2b86862e558197150ad05dcd8be0b641fea3f7385d996a168b4
+ size 58846092
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:13605978931ac2172545c5babb444b23a3d96fa5a3ba03442c23b0ab6e9d6020
+ size 67099420
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:64f9cccf846976925adfeba1c8cfde9966cb7fc0bacd4382b6fe5afe63bee855
+ size 67100958
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:18b1fcb69ac53ce1ec15fe6df0271056d181ed53dcf6229942771ab52a8245a5
+ size 67092851
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00045-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0a4e4fb9fd0e1078f8f0037873185acfffc5b2f3eae7b1553b4d694d2832ad4
+ size 67099697