orionweller committed
Commit 00cef06 · verified · 1 Parent(s): 400f7f8

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
  3. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  4. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  5. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  6. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  7. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  8. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  9. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  10. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  11. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  12. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  13. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  14. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  15. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  16. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  17. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  18. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  19. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  20. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  21. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  22. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  23. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  24. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  25. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  26. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  27. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  28. train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +17 -0
  29. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  30. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  31. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  32. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  33. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  34. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00074-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  35. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00076-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  36. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00076-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  37. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00098-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  38. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00098-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  39. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00098-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  40. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00101-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  41. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00188-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  42. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00218-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  43. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00218-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  44. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00218-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  45. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00218-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  46. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00232-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  47. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00300-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  48. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00300-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  49. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00321-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  50. train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00496-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106688, "hashes": {}}, "samples": 12717, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6724683, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67104796, "hashes": {}}, "samples": 13773, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6669763, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67091009, "hashes": {}}, "samples": 13380, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6618252, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67106919, "hashes": {}}, "samples": 12733, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 6734490, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 67103071, "hashes": {}}, "samples": 12539, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 6551301, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00005.mds", "bytes": 7632870, "hashes": {}}, "samples": 1456, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00005.mds.zstd", "bytes": 795446, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f558be79b88afbaca93589f66e82baf1d3ae508240c16c624b3161e6a720a19
+ size 7632870
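The .mds shard files themselves are tracked with Git LFS, so the diff shows only the pointer (spec version, sha256 oid, byte size) rather than the binary payload. A small sketch, assuming the shard has already been pulled locally and using placeholder file names, of checking a downloaded shard against its pointer:

```python
import hashlib
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    """Parse a Git LFS pointer file into its key/value fields."""
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

# Hypothetical paths: the pointer text as shown in the diff, and the pulled shard.
pointer = parse_lfs_pointer(Path("shard.00005.mds.pointer").read_text())
expected_oid = pointer["oid"].removeprefix("sha256:")
expected_size = int(pointer["size"])

blob = Path("shard.00005.mds").read_bytes()
assert len(blob) == expected_size, "size mismatch"
assert hashlib.sha256(blob).hexdigest() == expected_oid, "sha256 mismatch"
print("LFS pointer matches the downloaded shard")
```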
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 84721265, "total_tokens_skipped": 493, "percentiles": {"0th": 76, "10th": 344, "20th": 466, "30th": 573, "40th": 684, "50th": 827, "60th": 1033, "70th": 1329, "80th": 1794, "90th": 2672, "95th": 3689, "99th": 7769, "100th": 8191}}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107178, "hashes": {}}, "samples": 13044, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6839174, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67102599, "hashes": {}}, "samples": 13027, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6816173, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67106885, "hashes": {}}, "samples": 12997, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7042820, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67103687, "hashes": {}}, "samples": 13062, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 6704161, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 60666309, "hashes": {}}, "samples": 11957, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 6195452, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 81246808, "total_tokens_skipped": 120, "percentiles": {"0th": 80, "10th": 341, "20th": 466, "30th": 575, "40th": 692, "50th": 836, "60th": 1032, "70th": 1317, "80th": 1790, "90th": 2690, "95th": 3725, "99th": 7150, "100th": 8191}}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106445, "hashes": {}}, "samples": 13035, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6797712, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107747, "hashes": {}}, "samples": 13376, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6841329, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67103788, "hashes": {}}, "samples": 13048, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7018221, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67102231, "hashes": {}}, "samples": 13599, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 7409846, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 48879522, "hashes": {}}, "samples": 9842, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 5039415, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 78319036, "total_tokens_skipped": 76, "percentiles": {"0th": 78, "10th": 341, "20th": 461, "30th": 569, "40th": 684, "50th": 824, "60th": 1022, "70th": 1300, "80th": 1751, "90th": 2580, "95th": 3616, "99th": 7213, "100th": 8191}}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107790, "hashes": {}}, "samples": 14416, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8325737, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67106982, "hashes": {}}, "samples": 14279, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8512463, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67106798, "hashes": {}}, "samples": 14892, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 8622889, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 35666731, "hashes": {}}, "samples": 7694, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 4559012, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 58427019, "total_tokens_skipped": 220, "percentiles": {"0th": 73, "10th": 334, "20th": 443, "30th": 541, "40th": 640, "50th": 766, "60th": 925, "70th": 1154, "80th": 1578, "90th": 2382, "95th": 3246, "99th": 6306, "100th": 8191}}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67096159, "hashes": {}}, "samples": 15146, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10635437, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67102681, "hashes": {}}, "samples": 15056, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10724979, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 46115950, "hashes": {}}, "samples": 9932, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7369350, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 44436954, "total_tokens_skipped": 0, "percentiles": {"0th": 83, "10th": 324, "20th": 435, "30th": 530, "40th": 627, "50th": 740, "60th": 882, "70th": 1102, "80th": 1489, "90th": 2264, "95th": 3205, "99th": 6739, "100th": 8191}}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107968, "hashes": {}}, "samples": 15063, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12338147, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107097, "hashes": {}}, "samples": 15507, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12560877, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 14930748, "hashes": {}}, "samples": 3435, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 2862097, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 36742679, "total_tokens_skipped": 118, "percentiles": {"0th": 82, "10th": 327, "20th": 441, "30th": 539, "40th": 631, "50th": 740, "60th": 882, "70th": 1089, "80th": 1458, "90th": 2184, "95th": 3035, "99th": 5742, "100th": 8191}}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67105933, "hashes": {}}, "samples": 15697, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 14150528, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 62186537, "hashes": {}}, "samples": 14280, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 13232425, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 31843791, "total_tokens_skipped": 0, "percentiles": {"0th": 80, "10th": 322, "20th": 432, "30th": 526, "40th": 622, "50th": 723, "60th": 850, "70th": 1050, "80th": 1402, "90th": 2138, "95th": 3008, "99th": 6384, "100th": 8191}}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106446, "hashes": {}}, "samples": 16251, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 19783622, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 21156013, "hashes": {}}, "samples": 5090, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6331499, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 21724369, "total_tokens_skipped": 43, "percentiles": {"0th": 81, "10th": 317, "20th": 427, "30th": 522, "40th": 612, "50th": 710, "60th": 829, "70th": 1013, "80th": 1317, "90th": 1972, "95th": 2811, "99th": 6068, "100th": 8191}}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67106688, "hashes": {}}, "samples": 12717, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 6724683, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67104796, "hashes": {}}, "samples": 13773, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6669763, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67091009, "hashes": {}}, "samples": 13380, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 6618252, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 67106919, "hashes": {}}, "samples": 12733, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 6734490, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds", "bytes": 67103071, "hashes": {}}, "samples": 12539, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds.zstd", "bytes": 6551301, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds", "bytes": 7632870, "hashes": {}}, "samples": 1456, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds.zstd", "bytes": 795446, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67106445, "hashes": {}}, "samples": 13035, "size_limit": 67108864, "version": 2, "zip_data": {"basename": 
"000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 6797712, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67107747, "hashes": {}}, "samples": 13376, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6841329, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67103788, "hashes": {}}, "samples": 13048, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 7018221, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 67102231, "hashes": {}}, "samples": 13599, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 7409846, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds", "bytes": 48879522, "hashes": {}}, "samples": 9842, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds.zstd", "bytes": 5039415, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67105933, "hashes": {}}, "samples": 15697, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 14150528, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 62186537, "hashes": {}}, "samples": 14280, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 13232425, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67107968, "hashes": 
{}}, "samples": 15063, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 12338147, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67107097, "hashes": {}}, "samples": 15507, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 12560877, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 14930748, "hashes": {}}, "samples": 3435, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 2862097, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67106446, "hashes": {}}, "samples": 16251, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 19783622, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 21156013, "hashes": {}}, "samples": 5090, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6331499, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67107178, "hashes": {}}, "samples": 13044, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 6839174, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67102599, "hashes": {}}, "samples": 13027, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6816173, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": 
"000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67106885, "hashes": {}}, "samples": 12997, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 7042820, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 67103687, "hashes": {}}, "samples": 13062, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 6704161, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds", "bytes": 60666309, "hashes": {}}, "samples": 11957, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds.zstd", "bytes": 6195452, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67096159, "hashes": {}}, "samples": 15146, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 10635437, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67102681, "hashes": {}}, "samples": 15056, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 10724979, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 46115950, "hashes": {}}, "samples": 9932, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 7369350, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67107790, "hashes": {}}, "samples": 14416, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 8325737, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, 
null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67106982, "hashes": {}}, "samples": 14279, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 8512463, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67106798, "hashes": {}}, "samples": 14892, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 8622889, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 35666731, "hashes": {}}, "samples": 7694, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 4559012, "hashes": {}}}]}
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 437461921
+ }
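num_tokens.json records the size of the sampled split; the eight per-batch stats.json files above sum to exactly this 437,461,921 figure. A sketch that recomputes it from a local copy, with placeholder paths:

```python
import json
from pathlib import Path

# Placeholder path to a local copy of the sampled split shown in this commit.
root = Path("anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled")

# Sum total_tokens_written over every per-batch stats.json.
num_tokens = sum(
    json.loads(stats.read_text())["total_tokens_written"]
    for stats in root.glob("*/stats.json")
)
print(json.dumps({"num_tokens": num_tokens}, indent=2))
```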
train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,17 @@
+ {
+ "total_tokens": 437461921,
+ "target_tokens": 430556289,
+ "num_unique_folders": 8,
+ "num_total_folders_copied": 8,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0076-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0006-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0009-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0060-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0043-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0026-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
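sampling_summary.json reports how the decay-phase sample was assembled: eight unique batch folders were copied, none upsampled, against a target of 430,556,289 tokens, landing at 437,461,921. The sketch below illustrates one way such a folder-level selection could work; it is an assumption for illustration, not the actual sampling script behind this upload:

```python
import json
import random
from pathlib import Path

def sample_folders(folders, target_tokens, seed=0):
    """Illustrative greedy pass: keep whole tokenized batch folders until the
    running token count (read from each folder's stats.json) reaches the target."""
    folders = list(folders)
    random.Random(seed).shuffle(folders)
    copied, total = [], 0
    for folder in folders:
        if total >= target_tokens:
            break
        stats = json.loads((folder / "stats.json").read_text())
        copied.append(folder)
        total += stats["total_tokens_written"]
    return copied, total

# Hypothetical usage over a local copy of the unsampled batch folders:
src = Path("anp_Deva-tokenized-chunked-8192-512-32-backfill-nodups")
chosen, total = sample_folders((p for p in src.iterdir() if p.is_dir()),
                               target_tokens=430_556_289)
print(f"{len(chosen)} folders copied, {total} tokens")
```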
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e27c371ac1fa98a06c079fe89ab7f4a4c94be84572f1972f97fd57520e6e551e
+ size 67107892
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f87f49cc58f56d15fe885bcd3ce5722c7907997f0c9f6db0d8ef2c3c0eaf7f5
+ size 67092476
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2762356bd374b177d45cfe92951c3d339c5a796ee10c41c15e609e88f00da881
+ size 67107006
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c93ee59a0fb6a6c4c0a666313cf7a6682a5f8ebd5e3d49bfe4c0b6a1d50228b
+ size 67105078
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00031-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e3f84f2aeca8c30006abd970f907f98ce9eab9a9dcfcc9c0ceeaf71d86b48be6
+ size 45226822
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00074-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6436b7d676a8b162e22fadc84161c5a5bf64a5379b8f5b6e17058cff3c9cc40c
+ size 67106412
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00076-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3bd70de2f60cfd5934311f2d5f2cc98b40c1fd6a007bbe704631e972dc7bf078
+ size 67099121
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00076-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28a1b41300dedc58d88d5516745ab6cd250eb8a12b0945960a071a30a838d454
+ size 57626261
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00098-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1ce8aa355a9f8695ca9e9e0074eed5350842c8fbf9a82dae5a1ac88950decfc7
+ size 67106706
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00098-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3e2ee5b334649da0710207580dd7aa10a9d43dd950d6d2b57f3851e248b555c9
+ size 67104160
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00098-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:368ddacd2ca660fc0153283c577c65a7914e10a3ff961f9c4d361a9b82efad61
+ size 53664818
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00101-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4aa933e6d0c629a9d9ffabe14604cffa7b690f83d7b69bbf0b8081d01cdc36f
+ size 66960412
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00188-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:997d6a1a1523bdec7f80f99c6b2c2f9c6b5029fc9f81ae02bd2c8ce2716b6021
+ size 67087030
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00218-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20c55b3a9763aedc86531d1d7368b66be903c951abb740c9bcee088c554cfab4
+ size 67108800
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00218-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:642af04e3ee6e353d963f8650f72559a0062e3e9d6061d642184fd17492bdb25
+ size 67106437
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00218-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d17876b341eea120fe546ca8b3c9c581a47230b72c75b855c36241cd6c998b4
+ size 67104347
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00218-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d420c56c35a168d09c19c13326829aa50b9c462d644a80c7bc0a16005c0ce5c8
+ size 65285316
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00232-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2177ea1751b046ea077740e9a64eb217738e0d9b7316d9f4c4ed43dd1382013
+ size 67100752
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00300-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2d68f3db055017a2ccd40a926e6e6523370521abb855b1603b72b464aafd04f
+ size 67108801
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00300-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aa552dd8ebb5b69e243d0e8070677918921ab13c2565a9b7fabfdc32ee36493c
+ size 67108557
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00321-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f73a5ad7910726602205bd02bd4d3a02e73a29ff7cc1e45cd1d944716fbbab3
+ size 67106911
train/fineweb2-sampled-decay-v2/deu_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00496-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:93075f9abda336098ed6d6c9640fd1758a30a356c019da665af63fcfa4ffba02
+ size 67104178