orionweller committed
Commit 55accd7 · verified · Parent: 78c0626

Add files using upload-large-folder tool

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full list.
Files changed (50)
  1. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  3. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  4. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_11-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  5. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_6-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  6. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_6-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  7. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_6-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  8. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  9. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  10. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  11. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  12. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  13. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  14. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  15. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  16. train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +11 -0
  17. train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  18. train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  19. train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  20. train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  21. train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  22. train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +10 -0
  23. train/multi-wikis-sampled-decay/bul_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  24. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  25. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  26. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  27. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  28. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  29. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  30. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  31. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  32. train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +11 -0
  33. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  34. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  35. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  36. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  37. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  38. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  39. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  40. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  41. train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +11 -0
  42. train/multi-wikis-sampled-decay/fas_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  43. train/multi-wikis-sampled-decay/fas_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +12 -0
  44. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c/token_decile.json +0 -0
  45. train/multi-wikis-sampled-decay/kor_Hang-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  46. train/multi-wikis-sampled-decay/lvs_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  47. train/multi-wikis-sampled-decay/nob_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  48. train/multi-wikis-sampled-decay/nob_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  49. train/multi-wikis-sampled-decay/nob_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  50. train/multi-wikis-sampled-decay/nob_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 30577930, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9718551, "hashes": {}}}], "version": 2}
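 
Each `index.json` above describes one zstd-compressed `shard.00000.mds` file in the MDS format used by the MosaicML `streaming` library, with two columns per sample: `id` (str) and `input_ids` (ndarray:uint32); this particular shard holds 50,000 tokenized chunks. The sketch below is illustrative only (not part of this commit) and assumes the repository has been downloaded locally so the shard files exist at the path shown.

```python
# Illustrative sketch: reading one of these MDS folders with the MosaicML
# `streaming` package (pip install mosaicml-streaming). Column names and
# encodings ("id": str, "input_ids": ndarray:uint32) come from the index.json above.
from streaming import StreamingDataset

local_dir = (
    "train/multi-wikis-sampled-decay/"
    "arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/"
    "articles_1-tokenized-chunked-8192-512-32-backfill-nodups"
)

# remote=None reads the shard directly from the local directory.
dataset = StreamingDataset(local=local_dir, remote=None, shuffle=False)

sample = dataset[0]
print(sample["id"])                # document identifier (str column)
print(len(sample["input_ids"]))    # tokenized chunk length (uint32 array column)
```
 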
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 7292936, "total_tokens_skipped": 0, "percentiles": {"0th": 27, "10th": 58, "20th": 64, "30th": 71, "40th": 83, "50th": 98, "60th": 112, "70th": 134, "80th": 186, "90th": 287, "95th": 422, "99th": 782, "100th": 7384}}
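 
Each `stats.json` records total tokens written, skipped, and duplicated for its folder, plus percentiles of per-chunk token counts. A minimal sketch of how equivalent numbers could be recomputed from chunk lengths is shown below; it mirrors the output schema only and is not the original tokenization pipeline (the duplicate/skip counters are left as placeholders).

```python
# Hedged sketch: recompute stats.json-style fields from a list of per-chunk token
# counts, e.g. len(sample["input_ids"]) for every sample in a shard.
import json
import numpy as np

def length_stats(chunk_lengths):
    lengths = np.asarray(chunk_lengths, dtype=np.int64)
    points = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 99, 100]
    return {
        "total_duplicated_tokens": 0,   # placeholder: duplicate tracking not reproduced here
        "total_tokens_written": int(lengths.sum()),
        "total_tokens_skipped": 0,      # placeholder
        "percentiles": {f"{p}th": int(np.percentile(lengths, p)) for p in points},
    }

print(json.dumps(length_stats([27, 58, 98, 134, 287, 782, 7384]), indent=2))
```
 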
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_11-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 7306531, "total_tokens_skipped": 0, "percentiles": {"0th": 28, "10th": 58, "20th": 64, "30th": 72, "40th": 81, "50th": 94, "60th": 110, "70th": 130, "80th": 182, "90th": 295, "95th": 432, "99th": 807, "100th": 7676}}
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_6-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 29986298, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9749378, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_6-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 7132565, "total_tokens_skipped": 0, "percentiles": {"0th": 27, "10th": 57, "20th": 63, "30th": 70, "40th": 80, "50th": 93, "60th": 108, "70th": 127, "80th": 175, "90th": 282, "95th": 417, "99th": 799, "100th": 7267}}
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_6-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 40870746, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11715027, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 9865013, "total_tokens_skipped": 0, "percentiles": {"0th": 28, "10th": 58, "20th": 69, "30th": 83, "40th": 96, "50th": 116, "60th": 147, "70th": 192, "80th": 264, "90th": 412, "95th": 595, "99th": 1145, "100th": 7167}}
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 22629637, "hashes": {}}, "samples": 28842, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 6370842, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 5446880, "total_tokens_skipped": 0, "percentiles": {"0th": 31, "10th": 57, "20th": 70, "30th": 83, "40th": 94, "50th": 105, "60th": 135, "70th": 179, "80th": 247, "90th": 387, "95th": 566, "99th": 1142, "100th": 6659}}
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 22629637, "hashes": {}}, "samples": 28842, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 6370842, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_1-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 40870746, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_1-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 11715027, "hashes": {}}}]}
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 15311893
+ }
train/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "total_tokens": 15311893,
+ "target_tokens": 15000000,
+ "num_unique_folders": 2,
+ "num_total_folders_copied": 2,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/azj_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
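 
The `num_tokens.json` / `sampling_summary.json` pair records how each language's wiki was sampled toward a 15M-token target: whole article folders are copied until the target is met (here two azj_Latn folders totalling 15,311,893 tokens against a 15,000,000 target, with no upsampling). The sketch below is a hypothetical reconstruction of that bookkeeping; field names mirror the JSON above, but the actual sampling tool is not part of this commit.

```python
# Hypothetical sketch of the folder-level sampling bookkeeping behind sampling_summary.json.
import json
import random

def sample_folders(folder_tokens, target_tokens):
    """folder_tokens: dict mapping folder name -> token count (from each folder's stats.json)."""
    copied, total = [], 0
    folders = list(folder_tokens)
    random.shuffle(folders)              # take folders in some order until the target is reached
    for folder in folders:
        if total >= target_tokens:
            break
        copied.append(folder)
        total += folder_tokens[folder]
    return {
        "total_tokens": total,
        "target_tokens": target_tokens,
        "num_unique_folders": len(set(copied)),
        "num_total_folders_copied": len(copied),
        "num_upsampled_folders": 0,      # a folder copied more than once to hit the target would count here
        "copied_folders": copied,
    }

# Example input: the two azj_Latn folders above with token counts from their stats.json files.
summary = sample_folders(
    {"articles_1-tokenized-chunked-8192-512-32-backfill-nodups": 9865013,
     "articles_2-tokenized-chunked-8192-512-32-backfill-nodups": 5446880},
    target_tokens=15_000_000,
)
print(json.dumps(summary, indent=2))
```
 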
train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 66504193, "hashes": {}}, "samples": 50014, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 15574682, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 16270458, "total_tokens_skipped": 0, "percentiles": {"0th": 29, "10th": 95, "20th": 122, "30th": 150, "40th": 183, "50th": 223, "60th": 275, "70th": 346, "80th": 450, "90th": 656, "95th": 886, "99th": 1558, "100th": 8191}}
train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_1-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 66504193, "hashes": {}}, "samples": 50014, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_1-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 15574682, "hashes": {}}}]}
train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 16270458
+ }
train/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,10 @@
+ {
+ "total_tokens": 16270458,
+ "target_tokens": 15000000,
+ "num_unique_folders": 1,
+ "num_total_folders_copied": 1,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/ben_Beng-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
train/multi-wikis-sampled-decay/bul_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_4-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6e07398286dbd62d63e73533377f4b225cbca58965e2bb54eec04b96a0fc6a71
+ size 30630326
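 
Binary shard files like this one are stored through Git LFS, so the diff shows only the three-line pointer (spec version, sha256 oid, byte size) rather than the `.mds` payload itself. A small illustrative sketch, using only the standard library, for checking a downloaded shard against such a pointer:

```python
# Illustrative helper: verify that a downloaded file matches the oid/size in its Git LFS pointer.
import hashlib

def verify_lfs_pointer(pointer_path, data_path, chunk_size=1 << 20):
    """Return True if data_path matches the sha256 oid and size recorded in the pointer file."""
    with open(pointer_path, encoding="utf-8") as f:
        fields = dict(line.split(" ", 1) for line in f.read().splitlines() if line)
    expected_oid = fields["oid"].split(":", 1)[1]   # strip the "sha256:" prefix
    expected_size = int(fields["size"])

    digest, size = hashlib.sha256(), 0
    with open(data_path, "rb") as f:
        for block in iter(lambda: f.read(chunk_size), b""):
            digest.update(block)
            size += len(block)
    return digest.hexdigest() == expected_oid and size == expected_size
```
 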
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 35743442, "hashes": {}}, "samples": 50001, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12853422, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 8571076, "total_tokens_skipped": 0, "percentiles": {"0th": 27, "10th": 58, "20th": 71, "30th": 85, "40th": 102, "50th": 122, "60th": 148, "70th": 184, "80th": 239, "90th": 338, "95th": 446, "99th": 752, "100th": 8190}}
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 35497845, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 12753714, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 8509731, "total_tokens_skipped": 0, "percentiles": {"0th": 27, "10th": 59, "20th": 71, "30th": 85, "40th": 101, "50th": 121, "60th": 146, "70th": 182, "80th": 235, "90th": 335, "95th": 447, "99th": 744, "100th": 6777}}
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 35743442, "hashes": {}}, "samples": 50001, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 12853422, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_3-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 35497845, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_3-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 12753714, "hashes": {}}}]}
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 17080807
+ }
train/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "total_tokens": 17080807,
+ "target_tokens": 15000000,
+ "num_unique_folders": 2,
+ "num_total_folders_copied": 2,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/ces_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 56547750, "hashes": {}}, "samples": 50004, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 16347974, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 13785066, "total_tokens_skipped": 0, "percentiles": {"0th": 31, "10th": 77, "20th": 101, "30th": 127, "40th": 158, "50th": 194, "60th": 236, "70th": 298, "80th": 394, "90th": 567, "95th": 765, "99th": 1242, "100th": 8190}}
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 58399968, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 17011361, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 14232588, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 78, "20th": 105, "30th": 134, "40th": 166, "50th": 201, "60th": 249, "70th": 310, "80th": 408, "90th": 590, "95th": 790, "99th": 1274, "100th": 6668}}
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 56547750, "hashes": {}}, "samples": 50004, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 16347974, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_3-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 58399968, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_3-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 17011361, "hashes": {}}}]}
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 28017654
+ }
train/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "total_tokens": 28017654,
+ "target_tokens": 15000000,
+ "num_unique_folders": 2,
+ "num_total_folders_copied": 2,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_3-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/ell_Grek-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
train/multi-wikis-sampled-decay/fas_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_4-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 27448056, "hashes": {}}, "samples": 50009, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_4-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 8584014, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_10-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 27548619, "hashes": {}}, "samples": 50003, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_10-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 8781083, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_1-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 27912850, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_1-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 9024998, "hashes": {}}}]}
train/multi-wikis-sampled-decay/fas_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "total_tokens": 19648264,
+ "target_tokens": 15000000,
+ "num_unique_folders": 3,
+ "num_total_folders_copied": 3,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/fas_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_4-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/fas_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/fas_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_10-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/kor_Hang-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b80434c7c49607268d07c491362f4ba5aa386e2077f7efed589dbb38a810a9e7
+ size 46287865
train/multi-wikis-sampled-decay/lvs_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f490f5d350a8219cf5f77d014d10056d7bc12f86facc8e598df2a654d8608c76
+ size 9114273
train/multi-wikis-sampled-decay/nob_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 29066527, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9929578, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/nob_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 6902623, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 51, "20th": 61, "30th": 72, "40th": 85, "50th": 102, "60th": 124, "70th": 153, "80th": 196, "90th": 272, "95th": 349, "99th": 546, "100th": 2869}}
train/multi-wikis-sampled-decay/nob_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/nob_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 28584534, "hashes": {}}, "samples": 50001, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9558516, "hashes": {}}}], "version": 2}