orionweller committed
Commit ffe2191 · verified · 1 Parent(s): ed4b4d9

Add files using upload-large-folder tool
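The commit message refers to the Hub's upload-large-folder tool. As a minimal sketch, assuming the huggingface_hub Python client (whose HfApi.upload_large_folder backs that tool) and a hypothetical repo id and local path, an upload like this one could be produced as follows:

from huggingface_hub import HfApi

# Sketch only: repo_id and folder_path are illustrative, not taken from this commit.
api = HfApi()
api.upload_large_folder(
    repo_id="your-username/your-dataset",  # hypothetical target repo
    repo_type="dataset",
    folder_path="./local-dataset-folder",  # hypothetical local folder to mirror
)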

This view is limited to 50 files because the commit contains too many changes. See the raw diff for the full list.
Files changed (50)
  1. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  3. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  4. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  5. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  6. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  7. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  8. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  9. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  10. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  11. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  12. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  13. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00016-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  14. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00016-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  15. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00016-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  16. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  17. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  18. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  19. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  20. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  21. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  22. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00032-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  23. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00032-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  24. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00032-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  25. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00037-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  26. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00040-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  27. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00040-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  28. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00040-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  29. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00041-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  30. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00041-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  31. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00041-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  32. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  33. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
  34. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  35. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  36. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00048-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  37. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00048-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  38. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00048-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  39. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  40. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  41. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  42. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  43. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  44. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  45. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00058-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  46. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00058-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  47. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00058-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  48. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00062-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  49. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00062-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  50. train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00062-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108040, "hashes": {}}, "samples": 17376, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6072064, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67105774, "hashes": {}}, "samples": 15806, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6606384, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67104437, "hashes": {}}, "samples": 13019, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6915759, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67107814, "hashes": {}}, "samples": 12653, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 7869033, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 33503885, "hashes": {}}, "samples": 6138, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 4053024, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 74446784, "total_tokens_skipped": 205, "percentiles": {"0th": 82, "10th": 139, "20th": 200, "30th": 279, "40th": 353, "50th": 429, "60th": 583, "70th": 979, "80th": 1596, "90th": 2879, "95th": 5117, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00003-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67105784, "hashes": {}}, "samples": 15156, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9960425, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107907, "hashes": {}}, "samples": 16610, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10512744, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67103123, "hashes": {}}, "samples": 14744, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9626869, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 31468137, "hashes": {}}, "samples": 6962, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 5554541, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 57344217, "total_tokens_skipped": 130, "percentiles": {"0th": 78, "10th": 139, "20th": 195, "30th": 271, "40th": 346, "50th": 419, "60th": 551, "70th": 892, "80th": 1430, "90th": 2603, "95th": 4728, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00010-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108336, "hashes": {}}, "samples": 14710, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10740683, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67101869, "hashes": {}}, "samples": 14146, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10756261, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 44367923, "hashes": {}}, "samples": 11056, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7906246, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 44008506, "total_tokens_skipped": 22, "percentiles": {"0th": 79, "10th": 140, "20th": 198, "30th": 277, "40th": 349, "50th": 423, "60th": 569, "70th": 915, "80th": 1460, "90th": 2635, "95th": 4956, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00012-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67099906, "hashes": {}}, "samples": 16105, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 16054959, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67077773, "hashes": {}}, "samples": 14907, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 15442324, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 13953371, "hashes": {}}, "samples": 3754, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 3341729, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 36478764, "total_tokens_skipped": 0, "percentiles": {"0th": 83, "10th": 139, "20th": 195, "30th": 274, "40th": 348, "50th": 420, "60th": 566, "70th": 887, "80th": 1374, "90th": 2411, "95th": 4423, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00015-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00016-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67091510, "hashes": {}}, "samples": 16404, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 15461028, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67105109, "hashes": {}}, "samples": 15632, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 15892713, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 11226851, "hashes": {}}, "samples": 2828, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 2426748, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00016-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 35800346, "total_tokens_skipped": 37, "percentiles": {"0th": 84, "10th": 138, "20th": 193, "30th": 269, "40th": 343, "50th": 412, "60th": 537, "70th": 854, "80th": 1339, "90th": 2370, "95th": 4224, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00016-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108349, "hashes": {}}, "samples": 16029, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 23144145, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 29446201, "hashes": {}}, "samples": 7051, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10149981, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 23770897, "total_tokens_skipped": 9, "percentiles": {"0th": 80, "10th": 137, "20th": 192, "30th": 266, "40th": 343, "50th": 418, "60th": 561, "70th": 895, "80th": 1386, "90th": 2376, "95th": 4161, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00020-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67095078, "hashes": {}}, "samples": 16027, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 22826784, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 27761589, "hashes": {}}, "samples": 6960, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9762979, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 23347874, "total_tokens_skipped": 30, "percentiles": {"0th": 79, "10th": 139, "20th": 194, "30th": 273, "40th": 351, "50th": 429, "60th": 585, "70th": 897, "80th": 1375, "90th": 2315, "95th": 3934, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00021-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00032-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67082177, "hashes": {}}, "samples": 14108, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9708502, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107781, "hashes": {}}, "samples": 16238, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8707063, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67103949, "hashes": {}}, "samples": 16106, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 8869145, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 21911990, "hashes": {}}, "samples": 5013, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 2994292, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00032-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 54981326, "total_tokens_skipped": 0, "percentiles": {"0th": 83, "10th": 140, "20th": 200, "30th": 276, "40th": 349, "50th": 422, "60th": 567, "70th": 921, "80th": 1465, "90th": 2602, "95th": 4425, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00032-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00037-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00040-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67105774, "hashes": {}}, "samples": 15686, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 17642990, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 64700318, "hashes": {}}, "samples": 15387, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 17268433, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00040-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 32456367, "total_tokens_skipped": 127, "percentiles": {"0th": 80, "10th": 141, "20th": 199, "30th": 277, "40th": 353, "50th": 423, "60th": 558, "70th": 890, "80th": 1388, "90th": 2392, "95th": 4361, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00040-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00041-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108638, "hashes": {}}, "samples": 15848, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 18205412, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 37904007, "hashes": {}}, "samples": 9365, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10667755, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00041-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 25851403, "total_tokens_skipped": 50, "percentiles": {"0th": 80, "10th": 139, "20th": 194, "30th": 274, "40th": 348, "50th": 417, "60th": 556, "70th": 872, "80th": 1381, "90th": 2432, "95th": 4086, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00041-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108304, "hashes": {}}, "samples": 13424, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8301154, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67091318, "hashes": {}}, "samples": 15434, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6365413, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67108433, "hashes": {}}, "samples": 15573, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6888802, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67107269, "hashes": {}}, "samples": 13691, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 8181751, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 67107024, "hashes": {}}, "samples": 13651, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 8321092, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00005.mds", "bytes": 2877287, "hashes": {}}, "samples": 935, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00005.mds.zstd", "bytes": 297108, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26b1f89c2bbefe24fca1e567b27a0f0ff09113139a693316442cbeae6abd268b
+ size 2877287
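The shard binary itself is stored through Git LFS, so the repository keeps only this three-line pointer: the spec version, a sha256 object id, and the byte size. A minimal sketch of parsing such a pointer (the size cross-checks against the raw_data bytes for shard.00005.mds in the index.json above):

# Parse a Git LFS pointer file into its key/value fields.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = (
    "version https://git-lfs.github.com/spec/v1\n"
    "oid sha256:26b1f89c2bbefe24fca1e567b27a0f0ff09113139a693316442cbeae6abd268b\n"
    "size 2877287\n"
)
info = parse_lfs_pointer(pointer)
assert int(info["size"]) == 2877287  # matches raw_data bytes in index.json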
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 83441157, "total_tokens_skipped": 50, "percentiles": {"0th": 81, "10th": 140, "20th": 201, "30th": 281, "40th": 354, "50th": 434, "60th": 613, "70th": 986, "80th": 1571, "90th": 2935, "95th": 5049, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00047-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00048-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67097141, "hashes": {}}, "samples": 16498, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 7242904, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67100287, "hashes": {}}, "samples": 14613, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9030991, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67100686, "hashes": {}}, "samples": 14915, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7142683, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67101786, "hashes": {}}, "samples": 15861, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 7279183, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 1718617, "hashes": {}}, "samples": 462, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 200812, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00048-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 66536120, "total_tokens_skipped": 54, "percentiles": {"0th": 81, "10th": 139, "20th": 196, "30th": 273, "40th": 346, "50th": 417, "60th": 560, "70th": 914, "80th": 1465, "90th": 2624, "95th": 4508, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00048-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108626, "hashes": {}}, "samples": 16693, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12050501, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67105896, "hashes": {}}, "samples": 14907, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12010480, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 56703223, "hashes": {}}, "samples": 12854, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 10344028, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 47021073, "total_tokens_skipped": 81, "percentiles": {"0th": 80, "10th": 140, "20th": 196, "30th": 276, "40th": 347, "50th": 420, "60th": 554, "70th": 885, "80th": 1376, "90th": 2501, "95th": 4577, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67104390, "hashes": {}}, "samples": 15430, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12298474, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67108724, "hashes": {}}, "samples": 15344, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13434097, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 47041846, "hashes": {}}, "samples": 11079, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9336458, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 44646882, "total_tokens_skipped": 0, "percentiles": {"0th": 77, "10th": 139, "20th": 196, "30th": 272, "40th": 346, "50th": 417, "60th": 554, "70th": 894, "80th": 1419, "90th": 2500, "95th": 4687, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00055-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00058-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67100104, "hashes": {}}, "samples": 15418, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13025959, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67095534, "hashes": {}}, "samples": 15860, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14447943, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 8214262, "hashes": {}}, "samples": 2516, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 1519297, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00058-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 35063986, "total_tokens_skipped": 10, "percentiles": {"0th": 85, "10th": 139, "20th": 195, "30th": 271, "40th": 348, "50th": 418, "60th": 559, "70th": 893, "80th": 1387, "90th": 2390, "95th": 4255, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00058-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00062-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108434, "hashes": {}}, "samples": 16401, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 19454145, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 43345949, "hashes": {}}, "samples": 10910, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13094100, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00062-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 27178455, "total_tokens_skipped": 25, "percentiles": {"0th": 81, "10th": 139, "20th": 190, "30th": 268, "40th": 345, "50th": 417, "60th": 563, "70th": 872, "80th": 1344, "90th": 2275, "95th": 3829, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00062-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff