Commit 8bf4337 (verified) · Parent(s): ecb51e0
orionweller committed

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.

Files changed (50):
  1. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  3. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  4. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  5. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  6. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  7. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  8. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  9. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  10. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  11. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  12. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  13. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  14. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  15. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  16. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  17. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  18. train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +14 -0
  19. train/fineweb2-sampled-decay-v2/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  20. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0033-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  21. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0056-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  22. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0070-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  23. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0080-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  24. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0088-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  25. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0094-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  26. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0101-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  27. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0117-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  28. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0160-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  29. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0162-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  30. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0175-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  31. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0178-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  32. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0204-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  33. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0207-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  34. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0210-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  35. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0214-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  36. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0250-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  37. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0251-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  38. train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0264-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  39. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0032-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  40. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0061-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  41. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0061-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  42. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0061-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  43. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0132-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  44. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0132-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  45. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0132-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  46. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0176-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  47. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0179-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds +3 -0
  48. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0186-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  49. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0186-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  50. train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0186-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67100042, "hashes": {}}, "samples": 10526, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6315786, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67086284, "hashes": {}}, "samples": 11091, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5951669, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67108093, "hashes": {}}, "samples": 11024, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 6293726, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67108419, "hashes": {}}, "samples": 10866, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 6307952, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 67108316, "hashes": {}}, "samples": 9677, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 6295449, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00005.mds", "bytes": 48946654, "hashes": {}}, "samples": 8818, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00005.mds.zstd", "bytes": 4497023, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 95122669, "total_tokens_skipped": 340, "percentiles": {"0th": 121, "10th": 329, "20th": 428, "30th": 519, "40th": 623, "50th": 772, "60th": 1008, "70th": 1458, "80th": 2212, "90th": 3883, "95th": 5952, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106767, "hashes": {}}, "samples": 11747, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 7138319, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67096351, "hashes": {}}, "samples": 11162, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 7174782, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67107136, "hashes": {}}, "samples": 11084, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7352602, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 67107955, "hashes": {}}, "samples": 11315, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 7039039, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00004.mds", "bytes": 44754493, "hashes": {}}, "samples": 7248, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00004.mds.zstd", "bytes": 4919279, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 77452457, "total_tokens_skipped": 96, "percentiles": {"0th": 127, "10th": 330, "20th": 421, "30th": 513, "40th": 623, "50th": 770, "60th": 986, "70th": 1397, "80th": 2144, "90th": 3532, "95th": 5504, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67080747, "hashes": {}}, "samples": 11715, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9324324, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67107662, "hashes": {}}, "samples": 11178, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9372970, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 67107031, "hashes": {}}, "samples": 11611, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 9762261, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00003.mds", "bytes": 20805896, "hashes": {}}, "samples": 3481, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00003.mds.zstd", "bytes": 3121406, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 54917684, "total_tokens_skipped": 47, "percentiles": {"0th": 133, "10th": 329, "20th": 420, "30th": 511, "40th": 614, "50th": 752, "60th": 962, "70th": 1325, "80th": 2055, "90th": 3479, "95th": 5471, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67104922, "hashes": {}}, "samples": 11646, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11016712, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 67102090, "hashes": {}}, "samples": 11818, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 11473893, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00002.mds", "bytes": 45625876, "hashes": {}}, "samples": 8286, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00002.mds.zstd", "bytes": 7986430, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 44450321, "total_tokens_skipped": 19, "percentiles": {"0th": 133, "10th": 329, "20th": 412, "30th": 502, "40th": 598, "50th": 729, "60th": 923, "70th": 1267, "80th": 1953, "90th": 3374, "95th": 5122, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67103849, "hashes": {}}, "samples": 12426, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 14871130, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 61569619, "hashes": {}}, "samples": 11365, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14281133, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 31787838, "total_tokens_skipped": 17, "percentiles": {"0th": 138, "10th": 317, "20th": 398, "30th": 478, "40th": 574, "50th": 698, "60th": 875, "70th": 1168, "80th": 1784, "90th": 3160, "95th": 4953, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67080747, "hashes": {}}, "samples": 11715, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 9324324, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67107662, "hashes": {}}, "samples": 11178, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 9372970, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67107031, "hashes": {}}, "samples": 11611, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 9762261, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 20805896, "hashes": {}}, "samples": 3481, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 3121406, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67103849, "hashes": {}}, "samples": 12426, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 14871130, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 61569619, "hashes": {}}, "samples": 11365, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 14281133, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67106767, "hashes": {}}, "samples": 11747, "size_limit": 67108864, "version": 2, "zip_data": {"basename": 
"000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 7138319, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67096351, "hashes": {}}, "samples": 11162, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 7174782, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67107136, "hashes": {}}, "samples": 11084, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 7352602, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 67107955, "hashes": {}}, "samples": 11315, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 7039039, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds", "bytes": 44754493, "hashes": {}}, "samples": 7248, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds.zstd", "bytes": 4919279, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67104922, "hashes": {}}, "samples": 11646, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 11016712, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67102090, "hashes": {}}, "samples": 11818, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 11473893, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 45625876, "hashes": 
{}}, "samples": 8286, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 7986430, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 67100042, "hashes": {}}, "samples": 10526, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 6315786, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds", "bytes": 67086284, "hashes": {}}, "samples": 11091, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds.zstd", "bytes": 5951669, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds", "bytes": 67108093, "hashes": {}}, "samples": 11024, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00002.mds.zstd", "bytes": 6293726, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds", "bytes": 67108419, "hashes": {}}, "samples": 10866, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00003.mds.zstd", "bytes": 6307952, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds", "bytes": 67108316, "hashes": {}}, "samples": 9677, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00004.mds.zstd", "bytes": 6295449, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds", "bytes": 48946654, "hashes": {}}, "samples": 8818, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups/shard.00005.mds.zstd", "bytes": 4497023, "hashes": {}}}]}
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 303730969
+ }
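Note: num_tokens.json stores the total token count of the sampled language folder (303,730,969 for ckb_Arab here; 1,109,589,627 for ell_Grek below). A small sketch for aggregating these counts across language folders (the root path is an assumption):

```python
# Illustrative sketch: sum num_tokens.json across language folders to get the
# total size of the sampled decay mix.
import json
from pathlib import Path

root = Path("train/fineweb2-sampled-decay-v2")
totals = {
    p.parent.name: json.loads(p.read_text())["num_tokens"]
    for p in root.glob("*/num_tokens.json")
}
print(json.dumps(totals, indent=2))
print("total tokens:", sum(totals.values()))
```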
train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,14 @@
+ {
+ "total_tokens": 303730969,
+ "target_tokens": 273544121,
+ "num_unique_folders": 5,
+ "num_total_folders_copied": 5,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0020-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0023-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0018-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0013-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-decay/train/fineweb2-sampled-decay-v2/ckb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/000_00000-batch_0010-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
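Note: sampling_summary.json records how the ckb_Arab decay-phase sample was assembled: five unique batch folders were copied, none upsampled, for 303,730,969 tokens against a 273,544,121-token target; the total matches the sum of total_tokens_written across the five copied batches' stats.json files. A rough sketch of the copy-until-target logic this implies (not the actual script; selection order and paths are illustrative):

```python
# Rough sketch of the sampling logic implied by sampling_summary.json; the real
# script is not part of this commit. Field names follow the JSON above.
import json
import shutil
from pathlib import Path

def sample_language(src: Path, dst: Path, target_tokens: int) -> dict:
    total, copied = 0, []
    for folder in sorted(src.iterdir()):  # selection order is an assumption
        stats = folder / "stats.json"
        if not stats.is_file():
            continue
        shutil.copytree(folder, dst / folder.name)
        copied.append(str(folder))
        total += json.loads(stats.read_text())["total_tokens_written"]
        if total >= target_tokens:
            break  # stop once the token target is met; no upsampling needed
    return {
        "total_tokens": total,
        "target_tokens": target_tokens,
        "num_unique_folders": len(copied),
        "num_total_folders_copied": len(copied),
        "num_upsampled_folders": 0,
        "copied_folders": copied,
    }
```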
train/fineweb2-sampled-decay-v2/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 1109589627
+ }
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0033-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52afda518a3462eb72a9e5344ad59ef845cd0320f3ca12871709866c2a59edab
+ size 54868616
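Note: the shard.XXXXX.mds entries in this commit are Git LFS pointer files (spec v1); the repository stores only the sha256 OID and byte size, while the shard bytes themselves live in LFS storage. A minimal sketch that parses a pointer and verifies a fetched shard against it (file paths are illustrative):

```python
# Illustrative sketch: parse a Git LFS pointer like the one above and verify a
# locally downloaded shard against its recorded sha256 and size.
import hashlib
from pathlib import Path

def parse_pointer(path: str) -> dict:
    fields = dict(line.split(" ", 1) for line in Path(path).read_text().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

def verify(shard_path: str, pointer_path: str) -> bool:
    ptr = parse_pointer(pointer_path)
    data = Path(shard_path).read_bytes()
    return len(data) == ptr["size"] and hashlib.sha256(data).hexdigest() == ptr["oid"]
```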
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0056-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:72283c59f94b8705bc6a7f3e4990dc86cd06c5ab3af1aa56598a4516ca7cbcbf
+ size 54118077
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0070-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eb587cb08b8d83dcdd0b33753bb2a098e9444665dcba19009fdb4527b77d9c03
+ size 53434039
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0080-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b9c72de9405ba27e7440edc53e6ea0ba25411a81f8f0082af5969218a65aca6
+ size 51769980
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0088-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b69c74d7a6378478980140d20047dc6cf438e1ae044c93b289de283239723c19
+ size 52204089
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0094-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85284b3eb8a2f6318b678ab4ccfd29972f7f26ff68b4de636eaec20ca6fea6c0
+ size 53779927
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0101-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db8c7e8b34711ae72c594c5aeec2dd78a5d58137f443051b51ddf7ddce30492a
+ size 53495501
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0117-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:60aa00a8424ca56662c232c06f36fc343df03f7671ccd4039be0a2caba4c70d8
+ size 52254663
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0160-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3302dc7776af22204b3501e4e3062ed69ca4bc0c5943ed6bf88187ab84ea7919
+ size 52641800
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0162-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:810924ff0524a8dda3b171117cc20b8960f01ada5cf241ecef8cf9e0cba9b377
+ size 53242154
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0175-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a40ca1a7f7b472b0a934cfb8755db04eb3d59c1aea67671a96605779f5c8d8f
+ size 53907636
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0178-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d57cd9cf784af27ce97d1534ac960888459a5964a447c8c109fe6f23ae136e9
+ size 50458767
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0204-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:68fd2b91847c782f97de3e26b92ad53f73c40a3c41599a071470a7ba5e346723
+ size 54614894
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0207-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d8fccb92c2e64908e69ef92610082bde49f939036bbd096e9e35102412208b9
+ size 52176912
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0210-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9eaf08edb94687904efa7c19af32c2a201f9d6bdd5f92893fe426d22242244ed
+ size 54909850
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0214-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d6e02176c3c75427a37791c69578f9960c4f7a76ed50b6d13ba2fd941fa5114
+ size 52042703
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0250-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:122e42a63b5e85c0336fc588bafc6caea795620ed35e510de61ad0f95c0e0e5a
+ size 56981232
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0251-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c8686f4b9f30802d685b5730b1c894f0c44d15bd48a9015e71ef7882541411c3
+ size 56901914
train/fineweb2-sampled-decay-v2/glg_Latn_train-sampled/batch_0264-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da6b639a6c9b7671bcc4102f64e5de6f0f9a4970bb242dcfb6d0e1ff096162de
+ size 58575069
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0032-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0061-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67088910, "hashes": {}}, "samples": 16416, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12908178, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 16636366, "hashes": {}}, "samples": 3846, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 3196556, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0061-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 20607901, "total_tokens_skipped": 0, "percentiles": {"0th": 95, "10th": 193, "20th": 278, "30th": 366, "40th": 462, "50th": 591, "60th": 754, "70th": 982, "80th": 1345, "90th": 2148, "95th": 3285, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0061-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0132-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67106142, "hashes": {}}, "samples": 16866, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 14468425, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 26003639, "hashes": {}}, "samples": 3531, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5245944, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0132-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 22951773, "total_tokens_skipped": 0, "percentiles": {"0th": 89, "10th": 206, "20th": 299, "30th": 395, "40th": 511, "50th": 655, "60th": 836, "70th": 1099, "80th": 1507, "90th": 2377, "95th": 3707, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0132-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0176-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9d5c9e6d0465a3ff646c870f9fb2573f3562ac9a1e24cc860b26283b8f59bcb4
+ size 20179733
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0179-tokenized-chunked-8192-512-32-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6d9cdb3ac44d6e4827f9893eed36280f5b85ae98605ad096d1eaca020351c4e
+ size 13214803
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0186-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107750, "hashes": {}}, "samples": 17101, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 13548127, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 14793228, "hashes": {}}, "samples": 3211, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 3191559, "hashes": {}}}], "version": 2}
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0186-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 20151076, "total_tokens_skipped": 25, "percentiles": {"0th": 82, "10th": 192, "20th": 269, "30th": 349, "40th": 450, "50th": 561, "60th": 715, "70th": 933, "80th": 1266, "90th": 2091, "95th": 3325, "99th": 8190, "100th": 8191}}
train/fineweb2-sampled-decay-v2/hrv_Latn_train-sampled/batch_0186-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff