orionweller committed · Commit 2f69855 · verified · 1 Parent(s): 00ba7b7

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  3. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  4. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0041-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  5. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0066-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  6. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0066-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  7. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0066-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  8. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0117-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  9. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0117-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  10. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0117-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  11. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0145-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  12. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0145-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  13. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0145-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  14. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  15. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  16. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  17. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  18. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0250-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  19. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0250-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  20. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0250-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  21. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0263-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  22. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0263-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  23. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0263-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  24. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0263-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  25. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0312-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  26. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0312-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  27. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0312-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  28. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0314-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  29. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0314-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  30. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0314-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  31. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0350-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  32. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0350-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  33. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0350-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  34. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0350-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  35. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0374-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  36. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0374-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  37. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0374-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  38. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0381-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  39. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0381-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  40. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0381-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  41. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0392-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  42. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0392-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  43. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0392-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  44. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0446-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  45. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0446-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  46. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0446-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  47. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0487-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  48. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0487-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  49. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0487-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  50. train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0493-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107777, "hashes": {}}, "samples": 13908, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13744372, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 32184095, "hashes": {}}, "samples": 6696, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6667311, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 24494130, "total_tokens_skipped": 142, "percentiles": {"0th": 98, "10th": 200, "20th": 269, "30th": 354, "40th": 452, "50th": 579, "60th": 751, "70th": 1023, "80th": 1508, "90th": 2723, "95th": 5051, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0041-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 21722378, "total_tokens_skipped": 52, "percentiles": {"0th": 102, "10th": 192, "20th": 260, "30th": 335, "40th": 426, "50th": 541, "60th": 698, "70th": 921, "80th": 1312, "90th": 2387, "95th": 4053, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0066-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107554, "hashes": {}}, "samples": 14638, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13268800, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 25025043, "hashes": {}}, "samples": 5874, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5015393, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0066-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 22705810, "total_tokens_skipped": 66, "percentiles": {"0th": 90, "10th": 194, "20th": 265, "30th": 341, "40th": 443, "50th": 569, "60th": 729, "70th": 963, "80th": 1356, "90th": 2340, "95th": 4315, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0066-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0117-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67079433, "hashes": {}}, "samples": 14437, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13947528, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 31748395, "hashes": {}}, "samples": 6165, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7348945, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0117-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 24378177, "total_tokens_skipped": 77, "percentiles": {"0th": 96, "10th": 197, "20th": 265, "30th": 345, "40th": 444, "50th": 568, "60th": 744, "70th": 1018, "80th": 1465, "90th": 2701, "95th": 5146, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0117-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0145-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108185, "hashes": {}}, "samples": 13703, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 15380187, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 32727826, "hashes": {}}, "samples": 6975, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6550305, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0145-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 24629053, "total_tokens_skipped": 62, "percentiles": {"0th": 96, "10th": 193, "20th": 260, "30th": 343, "40th": 439, "50th": 564, "60th": 746, "70th": 990, "80th": 1463, "90th": 2704, "95th": 5411, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0145-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108148, "hashes": {}}, "samples": 17949, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 10850002, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 8549230, "hashes": {}}, "samples": 2217, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 1430972, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:426b34a9ccaaab05bcc33d79534032111812958a4ead6119e775a5066e7682bc
+ size 8549230
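The .mds shard payloads are stored through Git LFS, so the diff shows only the three-line pointer: spec version, sha256 OID, and byte size. Once the actual shard is fetched, it can be checked against the pointer with the standard library; a sketch using the values above:

    import hashlib

    SHARD = "train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds"
    EXPECTED_OID = "426b34a9ccaaab05bcc33d79534032111812958a4ead6119e775a5066e7682bc"
    EXPECTED_SIZE = 8549230

    h = hashlib.sha256()
    size = 0
    with open(SHARD, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
            size += len(chunk)

    assert size == EXPECTED_SIZE, f"size mismatch: {size} != {EXPECTED_SIZE}"
    assert h.hexdigest() == EXPECTED_OID, "sha256 mismatch"
    print("shard matches its LFS pointer")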
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 18592535, "total_tokens_skipped": 0, "percentiles": {"0th": 99, "10th": 196, "20th": 265, "30th": 342, "40th": 441, "50th": 553, "60th": 696, "70th": 883, "80th": 1196, "90th": 1939, "95th": 2972, "99th": 7061, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0197-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0250-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106337, "hashes": {}}, "samples": 13421, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12829162, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 30313902, "hashes": {}}, "samples": 7002, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5704344, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0250-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 24029063, "total_tokens_skipped": 0, "percentiles": {"0th": 99, "10th": 203, "20th": 280, "30th": 370, "40th": 486, "50th": 635, "60th": 811, "70th": 1080, "80th": 1535, "90th": 2730, "95th": 4423, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0250-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0263-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67101981, "hashes": {}}, "samples": 17092, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11180713, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 12870783, "hashes": {}}, "samples": 3122, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 2210671, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0263-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6e85da8c928ca9c6681685a5ad57439445c8ddef93ee84c6979060ab9179dab
+ size 12870783
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0263-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 19670580, "total_tokens_skipped": 66, "percentiles": {"0th": 100, "10th": 200, "20th": 268, "30th": 347, "40th": 451, "50th": 577, "60th": 728, "70th": 941, "80th": 1308, "90th": 2074, "95th": 3109, "99th": 7870, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0263-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0312-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67099036, "hashes": {}}, "samples": 16183, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11689189, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 18949906, "hashes": {}}, "samples": 4168, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 3515064, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0312-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 21187403, "total_tokens_skipped": 2, "percentiles": {"0th": 91, "10th": 203, "20th": 273, "30th": 361, "40th": 458, "50th": 572, "60th": 732, "70th": 953, "80th": 1313, "90th": 2217, "95th": 3580, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0312-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0314-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67090289, "hashes": {}}, "samples": 15475, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11715630, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 20355342, "hashes": {}}, "samples": 4934, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 3334297, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0314-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 21535596, "total_tokens_skipped": 28, "percentiles": {"0th": 94, "10th": 206, "20th": 285, "30th": 366, "40th": 460, "50th": 579, "60th": 732, "70th": 954, "80th": 1330, "90th": 2250, "95th": 3671, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0314-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0350-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106642, "hashes": {}}, "samples": 17983, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11801956, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 8927939, "hashes": {}}, "samples": 2227, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 1629909, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0350-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ffb39b2b1d5c8e8d7cef4bad1944a4886377ef123258e771519787a7b93c28b
+ size 8927939
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0350-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 18686117, "total_tokens_skipped": 0, "percentiles": {"0th": 99, "10th": 198, "20th": 267, "30th": 343, "40th": 443, "50th": 553, "60th": 699, "70th": 901, "80th": 1219, "90th": 1936, "95th": 2915, "99th": 7708, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0350-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0374-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108821, "hashes": {}}, "samples": 13251, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13144197, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 27352652, "hashes": {}}, "samples": 7241, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 4724881, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0374-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 23288252, "total_tokens_skipped": 0, "percentiles": {"0th": 105, "10th": 205, "20th": 279, "30th": 358, "40th": 455, "50th": 587, "60th": 751, "70th": 990, "80th": 1436, "90th": 2516, "95th": 4329, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0374-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0381-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67086429, "hashes": {}}, "samples": 13565, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13132229, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 28319875, "hashes": {}}, "samples": 6900, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5104225, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0381-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 23524934, "total_tokens_skipped": 118, "percentiles": {"0th": 100, "10th": 201, "20th": 274, "30th": 359, "40th": 465, "50th": 594, "60th": 770, "70th": 1044, "80th": 1484, "90th": 2583, "95th": 4333, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0381-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0392-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67105575, "hashes": {}}, "samples": 13382, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13399821, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 38485986, "hashes": {}}, "samples": 7294, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7340233, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0392-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 26067857, "total_tokens_skipped": 5, "percentiles": {"0th": 98, "10th": 204, "20th": 278, "30th": 366, "40th": 482, "50th": 626, "60th": 817, "70th": 1102, "80th": 1618, "90th": 2940, "95th": 5474, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0392-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0446-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108749, "hashes": {}}, "samples": 15299, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12347841, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 20890361, "hashes": {}}, "samples": 5174, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 4001492, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0446-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 21673021, "total_tokens_skipped": 0, "percentiles": {"0th": 96, "10th": 198, "20th": 270, "30th": 348, "40th": 444, "50th": 559, "60th": 710, "70th": 913, "80th": 1293, "90th": 2223, "95th": 3933, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0446-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0487-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107462, "hashes": {}}, "samples": 12303, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 16803736, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 41055953, "hashes": {}}, "samples": 8455, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9673621, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0487-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 26709602, "total_tokens_skipped": 72, "percentiles": {"0th": 99, "10th": 196, "20th": 263, "30th": 343, "40th": 449, "50th": 590, "60th": 779, "70th": 1050, "80th": 1586, "90th": 3202, "95th": 6433, "99th": 8191, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0487-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ekk_Latn_train-sampled/batch_0493-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108857, "hashes": {}}, "samples": 12410, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 16478421, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 43018717, "hashes": {}}, "samples": 8365, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 10537684, "hashes": {}}}], "version": 2}