orionweller committed
Commit 5ba2ed0 · verified · 1 parent: ad1eb35

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.

Files changed (50):
  1. .gitattributes +1 -0
  2. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  3. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  4. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  5. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  6. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  7. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  8. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  9. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  10. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  11. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  12. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  13. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  14. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  15. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  16. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  17. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  18. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  19. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  20. train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +20 -0
  21. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  22. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  23. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  24. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  25. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  26. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
  27. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  28. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  29. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  30. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  31. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
  32. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  33. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  34. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  35. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  36. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  37. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds +3 -0
  38. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  39. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds +3 -0
  40. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  41. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  42. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  43. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  44. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  45. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00013-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  46. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00013-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  47. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00023-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds +3 -0
  48. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  49. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds +3 -0
  50. train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
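Every path above follows the same naming convention: train/<corpus>/<lang>_<script>-tokenized-chunked-8192-512-32-backfill-nodups-sampled/<group>-batch_<n>-tokenized-chunked-8192-512-32-backfill-nodups/<file>. Only the 8192 is corroborated elsewhere in this commit (the stats files below cap chunk lengths at 8191 tokens); what 512 and 32 denote is not stated here. A minimal sketch of splitting such a name into its fields, with the unexplained fields left as assumptions:

# Sketch: split the recurring folder-name pattern into fields.
# Only 8192 is corroborated by the stats files (chunks cap at 8191 tokens);
# the meaning of the 512 and 32 fields is an assumption, not documented here.
import re

name = "ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled"
m = re.match(
    r"(?P<lang>[a-z]+)_(?P<script>[A-Za-z]+)"
    r"-tokenized-chunked-(?P<max_len>\d+)-(?P<field2>\d+)-(?P<field3>\d+)",
    name,
)
print(m.groupdict())
# {'lang': 'ars', 'script': 'Arab', 'max_len': '8192', 'field2': '512', 'field3': '32'}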
.gitattributes CHANGED
@@ -57,3 +57,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
# Video files - compressed
*.mp4 filter=lfs diff=lfs merge=lfs -text
*.webm filter=lfs diff=lfs merge=lfs -text
+ train/tulu_flan-sampled-decay/tulu_flan_0060-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json filter=lfs diff=lfs merge=lfs -text
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67094854, "hashes": {}}, "samples": 17296, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6761999, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67080947, "hashes": {}}, "samples": 7560, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6239681, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67108742, "hashes": {}}, "samples": 27626, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7719407, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 60313877, "hashes": {}}, "samples": 24831, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 6769446, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 64170544, "total_tokens_skipped": 18, "percentiles": {"0th": 80, "10th": 130, "20th": 159, "30th": 196, "40th": 244, "50th": 304, "60th": 395, "70th": 545, "80th": 812, "90th": 1579, "95th": 3682, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 60325503, "total_tokens_skipped": 0, "percentiles": {"0th": 77, "10th": 129, "20th": 158, "30th": 194, "40th": 240, "50th": 302, "60th": 388, "70th": 527, "80th": 769, "90th": 1468, "95th": 3307, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108584, "hashes": {}}, "samples": 23747, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8824281, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107627, "hashes": {}}, "samples": 22939, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8935673, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 36647674, "hashes": {}}, "samples": 15750, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 5027482, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 41723616, "total_tokens_skipped": 10, "percentiles": {"0th": 76, "10th": 130, "20th": 159, "30th": 195, "40th": 239, "50th": 298, "60th": 385, "70th": 512, "80th": 717, "90th": 1250, "95th": 2145, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67083193, "hashes": {}}, "samples": 25514, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 15358526, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 32112520, "hashes": {}}, "samples": 9825, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6999204, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 24237098, "total_tokens_skipped": 30, "percentiles": {"0th": 73, "10th": 132, "20th": 163, "30th": 200, "40th": 246, "50th": 311, "60th": 402, "70th": 545, "80th": 768, "90th": 1299, "95th": 2189, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67105906, "hashes": {}}, "samples": 26734, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 15842343, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 19071668, "hashes": {}}, "samples": 8141, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 4715475, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 20990053, "total_tokens_skipped": 0, "percentiles": {"0th": 75, "10th": 131, "20th": 160, "30th": 197, "40th": 240, "50th": 299, "60th": 386, "70th": 510, "80th": 715, "90th": 1154, "95th": 1877, "99th": 7609, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67081464, "hashes": {}}, "samples": 25925, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 17844322, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 11926963, "hashes": {}}, "samples": 4530, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 3057891, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 19267991, "total_tokens_skipped": 0, "percentiles": {"0th": 79, "10th": 133, "20th": 162, "30th": 196, "40th": 240, "50th": 301, "60th": 385, "70th": 517, "80th": 732, "90th": 1182, "95th": 1958, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
 
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67094854, "hashes": {}}, "samples": 17296, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 6761999, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67080947, "hashes": {}}, "samples": 7560, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6239681, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67108742, "hashes": {}}, "samples": 27626, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 7719407, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 60313877, "hashes": {}}, "samples": 24831, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 6769446, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67108584, "hashes": {}}, "samples": 23747, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 8824281, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67107627, "hashes": {}}, "samples": 22939, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 8935673, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 36647674, "hashes": {}}, "samples": 15750, "size_limit": 67108864, "version": 2, "zip_data": {"basename": 
"000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 5027482, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67108217, "hashes": {}}, "samples": 30813, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 7760506, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67106953, "hashes": {}}, "samples": 12937, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6672258, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67108766, "hashes": {}}, "samples": 11214, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 6338977, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 44809906, "hashes": {}}, "samples": 21030, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 5526425, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67100272, "hashes": {}}, "samples": 24154, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 20698447, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 3488957, "hashes": {}}, "samples": 1345, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 1163445, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67085812, "hashes": 
{}}, "samples": 24483, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 19958519, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 7390688, "hashes": {}}, "samples": 2700, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 2177160, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67081464, "hashes": {}}, "samples": 25925, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 17844322, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 11926963, "hashes": {}}, "samples": 4530, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 3057891, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67083193, "hashes": {}}, "samples": 25514, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 15358526, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 32112520, "hashes": {}}, "samples": 9825, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6999204, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67105906, "hashes": {}}, "samples": 26734, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 15842343, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": 
"000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 19071668, "hashes": {}}, "samples": 8141, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 4715475, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67107947, "hashes": {}}, "samples": 27256, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 7323348, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67089491, "hashes": {}}, "samples": 7996, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6399377, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67082665, "hashes": {}}, "samples": 9664, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 6198657, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 67090885, "hashes": {}}, "samples": 25619, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 7133640, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds", "bytes": 27049793, "hashes": {}}, "samples": 9651, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds.zstd", "bytes": 2638377, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67107695, "hashes": {}}, "samples": 26713, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 15216105, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, 
null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 27010800, "hashes": {}}, "samples": 10680, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 6279699, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67108282, "hashes": {}}, "samples": 27122, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 16704394, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 15332332, "hashes": {}}, "samples": 5985, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 3923952, "hashes": {}}}]}
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 381742947
+ }
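num_tokens.json carries the per-language token count, which should equal the sum of total_tokens_written over the copied batch folders. A consistency-check sketch (the invariant is my assumption, not documented in this commit):

# Sketch: cross-check num_tokens.json against the per-batch stats files.
import json
from pathlib import Path

root = Path("train/fineweb2-sampled-decay-v2/"
            "ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled")

summed = sum(json.loads(p.read_text())["total_tokens_written"]
             for p in root.glob("*/stats.json"))
declared = json.loads((root / "num_tokens.json").read_text())["num_tokens"]
print(summed, declared)  # both should be 381742947 if the invariant holds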
train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,20 @@
+ {
+ "total_tokens": 381742947,
+ "target_tokens": 363746546,
+ "num_unique_folders": 11,
+ "num_total_folders_copied": 11,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0058-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0005-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0063-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0021-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0051-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0067-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0050-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0007-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0004-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0053-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ars_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0048-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
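sampling_summary.json documents how this language's decay-phase sample was assembled: eleven unique batch folders were copied once each (no upsampling), slightly overshooting the token target. A sketch of reading it back:

# Sketch: report how the copied folders compare to the token target.
import json

with open("sampling_summary.json") as f:  # file shown above
    summary = json.load(f)

ratio = summary["total_tokens"] / summary["target_tokens"]
print(f"{summary['num_total_folders_copied']} folders copied, "
      f"{summary['num_upsampled_folders']} upsampled, "
      f"{ratio:.3f}x the target")  # 381742947 / 363746546 ≈ 1.049x here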
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:39723e42caed186e0db3254cbfb04c9626c91acdb9702ea5b1bd84d76be214e6
+ size 67108704
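The shard.*.mds entries in this diff are Git LFS pointers rather than the binary shards themselves: oid is the SHA-256 of the real file and size is its byte length. A small parsing sketch using the pointer above:

# Sketch: parse a Git LFS pointer file (three "key value" lines).
def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    assert fields["version"] == "https://git-lfs.github.com/spec/v1"
    return {"oid": fields["oid"], "size": int(fields["size"])}

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:39723e42caed186e0db3254cbfb04c9626c91acdb9702ea5b1bd84d76be214e6
size 67108704"""
print(parse_lfs_pointer(pointer))  # size matches the 67108864-byte MDS size_limit minus slack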
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9024106edd12ceab701cdfbb3ccf87be65ab1c93d882ce153bb63da28a6c46d
+ size 67090304
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d3e667478d447344a8554c0a474f821acdded6ef5a10023c0c87dfac6f27f43
+ size 67108825
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7e58f8f2493b94a1f7fd1dd2b5c1baf5da644c493e59e04f3701ca46151cb9ce
+ size 67091680
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a92b43d129f5c074120fc431b5da47fc962784bc807b4aac86e4325124e22b78
+ size 67106056
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1407816b7bd1a553a556a5f2424cc5084d5480ecfcaf2a2cd994027f15279e6f
+ size 47660312
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d8647003353702378960842fa75981af5c30a56b8cca9c3738523350175cbf8f
+ size 67079120
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecb1c1c8ff7eb506d6e3547ddb0d1b0c351e1eaa0c03097dbf24e985d9749d31
+ size 67104612
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6ae7f9ee90d3893e36239b0b133f2882301fbd3aa9b65a9ef611fc869d0221e
+ size 67103004
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e65ecdef6d249756082df658643c1ccc81a787531afed45330e9a652dc60ca9
+ size 67104395
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d498e11de6564c4cbb251e0e0e3323f83b9708a5d0170bdbcf55bbff50e0e8b2
+ size 67108381
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108156, "hashes": {}}, "samples": 16915, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 5945324, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67095372, "hashes": {}}, "samples": 14392, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6529310, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67082315, "hashes": {}}, "samples": 12457, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7056221, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67108797, "hashes": {}}, "samples": 14693, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 8008725, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 67088319, "hashes": {}}, "samples": 13464, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 6338781, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00005.mds", "bytes": 57923388, "hashes": {}}, "samples": 13868, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00005.mds.zstd", "bytes": 5131629, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 96983905, "total_tokens_skipped": 438, "percentiles": {"0th": 90, "10th": 152, "20th": 216, "30th": 308, "40th": 389, "50th": 474, "60th": 628, "70th": 892, "80th": 1394, "90th": 2740, "95th": 5328, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00002-batch_0001-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render.
 
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:29f852d3a17364db9971ad298fc89d110e6f599fd02e64c93f1853f77958181d
+ size 67108406
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2ce30e5670cd560a9595e8164db2af21e5dcb33a6a33f3ba9254b532d6a1ceb7
+ size 67106692
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a062a393f05f65cb86ad64ce62405244c3075feaafef81e0aa11fe5f0d590b2e
+ size 67103364
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c4fea1e8119f245311a004a9025ad8e3eb1f57dc7361a9143439028a9654a689
+ size 67104760
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00004-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:520f902d1ac786e9ca4b54521d55d08aed0a825293eda05cedd0b7dad0f14a87
+ size 28384129
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:87a194af5063a5c0d50c9e9d943251b282001e1a25f1004753263c5f3e99b9ee
+ size 67108142
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00006-batch_0000-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:855484c40533a113ff590cef330f51c107b0b11af71eb8a4fa7335d56bad80b4
+ size 44248132
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52b6d1dc06cc993ed4572bf9702fd020195c30408227ea2f348a1ee905fffeb3
+ size 67107883
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:28c328cad25b41a16401666689425537fe39d995c466108460457576260b7698
+ size 67099961
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00009-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:02c27fe314e682eb596262cd973028ef6426399a72054fee6613669e95fe861c
+ size 30014678
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00013-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0a92ff10162396de6c3ac70ba135238aa357ab85c0c9edad402c654b100aa565
+ size 67089330
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00013-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c627ef2273356ede7a942027d794a887c3acf44b5dc1168c94b15c5a10b9881b
+ size 25979875
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00023-batch_0003-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:366b9d1451bac6f3f6b1eb40ee766ba5199ea310785ec37266ad71b5106461ca
+ size 26215445
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67102496, "hashes": {}}, "samples": 14820, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9833801, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67105518, "hashes": {}}, "samples": 14572, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10009491, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67108426, "hashes": {}}, "samples": 13356, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 10504903, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67089713, "hashes": {}}, "samples": 13725, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 10375446, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 3236876, "hashes": {}}, "samples": 676, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 531645, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e6d814e61b66ae1cfa8f0227552073549c3280cac07e39c3926cbe22f376d57
+ size 3236876
train/fineweb2-sampled-decay-v2/hun_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00029-batch_0002-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 66999413, "total_tokens_skipped": 173, "percentiles": {"0th": 91, "10th": 157, "20th": 226, "30th": 324, "40th": 410, "50th": 501, "60th": 678, "70th": 979, "80th": 1531, "90th": 2915, "95th": 5165, "99th": 8190, "100th": 8191}}