orionweller committed
Commit 78c0626 · verified · 1 Parent(s): 6bf143f

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes; see the raw diff for the full change set.
Files changed (50)
  1. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_11-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  2. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_11-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  3. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  4. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  5. train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +12 -0
  6. train/multi-wikis-sampled-decay/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  7. train/multi-wikis-sampled-decay/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds +3 -0
  8. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  9. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  10. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  11. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_19-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  12. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_19-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  13. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_19-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  14. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_20-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  15. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_20-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  16. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_20-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  17. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  18. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  19. train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +12 -0
  20. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  21. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  22. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  23. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  24. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  25. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  26. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  27. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  28. train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +11 -0
  29. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  30. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  31. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  32. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_5-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  33. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_5-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  34. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_5-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  35. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  36. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  37. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  38. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  39. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  40. train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +12 -0
  41. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c/index.json +1 -0
  42. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c/stats.json +1 -0
  43. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/index.json +1 -0
  44. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  45. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
  46. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json +1 -0
  47. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json +3 -0
  48. train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json +11 -0
  49. train/multi-wikis-sampled-decay/lvs_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json +1 -0
  50. train/multi-wikis-sampled-decay/lvs_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json +0 -0
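All 50 files above sit under train/multi-wikis-sampled-decay/ and follow the same per-language layout: compressed MDS shards plus index.json, stats.json, num_tokens.json, and sampling_summary.json metadata. A minimal sketch for fetching just this subtree with huggingface_hub follows; the repo_id is a placeholder, since the repository name is not shown on this page.

```python
# Minimal sketch: download only the train/multi-wikis-sampled-decay/ subtree added by
# this commit. repo_id is a placeholder; substitute the actual dataset repository.
from huggingface_hub import snapshot_download

local_path = snapshot_download(
    repo_id="some-org/some-dataset",                       # placeholder
    repo_type="dataset",
    allow_patterns=["train/multi-wikis-sampled-decay/*"],  # limit to the files above
)
print(local_path)
```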
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_11-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 30682583, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 9953169, "hashes": {}}}], "version": 2}
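Each per-folder index.json like the one above is a MosaicML Streaming (MDS) shard manifest: it lists the column names and encodings ("id" as str, "input_ids" as uint32 arrays), the sample count, and the raw and zstd-compressed shard sizes. A minimal sketch for reading such a folder, assuming the mosaicml-streaming package and a hypothetical local path:

```python
# Minimal sketch: iterate over one tokenized chunk folder with mosaicml-streaming.
# Assumes index.json and shard.00000.mds.zstd have been downloaded locally;
# the path below is hypothetical.
from streaming import StreamingDataset

local_dir = "arb_Arab-sampled/articles_11-tokenized-chunked-8192-512-32-backfill-nodups"
ds = StreamingDataset(local=local_dir, shuffle=False)

print(len(ds))            # 50000, matching the "samples" field in index.json
sample = ds[0]            # dict keyed by column_names: "id" (str), "input_ids" (ndarray)
print(sample["id"], len(sample["input_ids"]))
```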
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_11-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_11-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 30682583, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_11-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 9953169, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_1-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 30577930, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_1-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 9718551, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_6-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 29986298, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_6-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 9749378, "hashes": {}}}]}
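The directory-level index.json above repeats the same shard records, but with each basename prefixed by its subfolder, so the whole language directory can be opened as a single streaming dataset. A rough sketch of that merge, under the assumption that it is plain concatenation of the per-folder index files (the actual tooling is not part of this commit):

```python
# Rough sketch: build a directory-level index.json by concatenating per-folder
# index.json files and prefixing shard basenames with the subfolder name.
# This mirrors the structure shown above; it is not the pipeline's own code.
import json
import os

def merge_folder_indexes(folders, out_path):
    shards = []
    for folder in folders:
        with open(os.path.join(folder, "index.json")) as f:
            sub_index = json.load(f)
        for shard in sub_index["shards"]:
            for key in ("raw_data", "zip_data"):
                if shard.get(key):
                    shard[key]["basename"] = os.path.join(
                        os.path.basename(folder), shard[key]["basename"]
                    )
            shards.append(shard)
    with open(out_path, "w") as f:
        json.dump({"version": 2, "shards": shards}, f)
```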
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 21732032
+ }
train/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "total_tokens": 21732032,
+ "target_tokens": 15000000,
+ "num_unique_folders": 3,
+ "num_total_folders_copied": 3,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_6-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_11-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_1-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
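num_tokens.json and sampling_summary.json record the per-language token budget: the three copied arb_Arab folders sum to 21,732,032 tokens against a 15,000,000-token target, with no upsampling needed. A minimal consistency check, assuming local copies of both files at a hypothetical path:

```python
# Minimal sketch: sanity-check a sampled language directory against its token target.
# Assumes the two JSON files shown above exist locally; the path is hypothetical.
import json
from pathlib import Path

lang_dir = Path("multi-wikis-sampled-decay/arb_Arab-tokenized-chunked-8192-512-32-backfill-nodups-sampled")
num_tokens = json.loads((lang_dir / "num_tokens.json").read_text())["num_tokens"]
summary = json.loads((lang_dir / "sampling_summary.json").read_text())

assert num_tokens == summary["total_tokens"]                 # both report 21,732,032 here
assert summary["total_tokens"] >= summary["target_tokens"]   # 15M-token target is met
print(summary["num_total_folders_copied"], "folders copied,",
      summary["num_upsampled_folders"], "upsampled")
```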
train/multi-wikis-sampled-decay/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8f5d239d9a7693214f0f834465fd7b9aa61953cf9dd40d392354ca312dd201f
+ size 32057951
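The shard.00000.mds entries in this commit are Git LFS pointer files rather than the binary shards themselves: the three lines give the LFS spec version, the SHA-256 of the actual blob, and its size in bytes. A minimal sketch for verifying a downloaded shard against such a pointer (file names are hypothetical):

```python
# Minimal sketch: check a downloaded shard against its Git LFS pointer
# ("oid sha256:..." and "size ..." above). File names are hypothetical.
import hashlib

def matches_lfs_pointer(pointer_path, blob_path):
    fields = dict(
        line.split(" ", 1) for line in open(pointer_path).read().splitlines() if line
    )
    expected_oid = fields["oid"].split(":", 1)[1]
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    actual_size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
            actual_size += len(chunk)
    return digest.hexdigest() == expected_oid and actual_size == expected_size

print(matches_lfs_pointer("shard.00000.mds.pointer", "shard.00000.mds"))
```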
train/multi-wikis-sampled-decay/dan_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:54fd946c0ec45c91c6e81b4f3102a8685f881403d0a399928573a06e3e067c94
+ size 32381802
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 21855455, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 7757432, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 5100589, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 40, "20th": 46, "30th": 52, "40th": 61, "50th": 71, "60th": 84, "70th": 104, "80th": 134, "90th": 197, "95th": 271, "99th": 476, "100th": 6084}}
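Each stats.json summarizes the per-chunk token-length distribution for its folder: total tokens written, tokens skipped or duplicated, and length percentiles from the 0th to the 100th. A rough sketch of how such a summary could be computed from per-chunk token counts (the pipeline's own implementation is not part of this commit):

```python
# Rough sketch: produce a stats.json-style summary from per-chunk token counts.
# Toy input only; this is not the pipeline's own code.
import numpy as np

def length_stats(token_counts):
    lengths = np.asarray(token_counts)
    points = [0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 95, 99, 100]
    return {
        "total_duplicated_tokens": 0,
        "total_tokens_written": int(lengths.sum()),
        "total_tokens_skipped": 0,
        "percentiles": {f"{p}th": int(np.percentile(lengths, p)) for p in points},
    }

print(length_stats([20, 40, 46, 52, 61, 71, 84, 104, 134, 197, 271, 476, 6084]))
```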
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_13-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_19-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 21514096, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 7611020, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_19-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 5015290, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 40, "20th": 46, "30th": 52, "40th": 60, "50th": 71, "60th": 84, "70th": 103, "80th": 132, "90th": 192, "95th": 265, "99th": 469, "100th": 3811}}
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_19-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_20-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 22550495, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8083470, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_20-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 5261787, "total_tokens_skipped": 0, "percentiles": {"0th": 20, "10th": 40, "20th": 46, "30th": 53, "40th": 61, "50th": 71, "60th": 86, "70th": 107, "80th": 139, "90th": 205, "95th": 282, "99th": 513, "100th": 3944}}
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_20-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_20-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 22550495, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_20-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 8083470, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_13-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 21855455, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_13-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 7757432, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_19-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 21514096, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_19-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 7611020, "hashes": {}}}]}
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 15377666
+ }
train/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "total_tokens": 15377666,
+ "target_tokens": 15000000,
+ "num_unique_folders": 3,
+ "num_total_folders_copied": 3,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_19-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_20-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/fra_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_13-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 41411914, "hashes": {}}, "samples": 50006, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 14348348, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 10002747, "total_tokens_skipped": 0, "percentiles": {"0th": 26, "10th": 55, "20th": 65, "30th": 82, "40th": 103, "50th": 128, "60th": 162, "70th": 211, "80th": 285, "90th": 427, "95th": 582, "99th": 963, "100th": 8191}}
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 42263656, "hashes": {}}, "samples": 50008, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 14792104, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 10200293, "total_tokens_skipped": 0, "percentiles": {"0th": 26, "10th": 55, "20th": 68, "30th": 87, "40th": 108, "50th": 135, "60th": 169, "70th": 217, "80th": 289, "90th": 427, "95th": 575, "99th": 1001, "100th": 8191}}
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 41411914, "hashes": {}}, "samples": 50006, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 14348348, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 42263656, "hashes": {}}, "samples": 50008, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_2-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 14792104, "hashes": {}}}]}
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 20203040
+ }
train/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "total_tokens": 20203040,
+ "target_tokens": 15000000,
+ "num_unique_folders": 2,
+ "num_total_folders_copied": 2,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_2-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/hrv_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 28962242, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11444867, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 6892245, "total_tokens_skipped": 0, "percentiles": {"0th": 29, "10th": 71, "20th": 79, "30th": 88, "40th": 97, "50th": 108, "60th": 122, "70th": 142, "80th": 174, "90th": 237, "95th": 309, "99th": 519, "100th": 5723}}
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_5-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 29055484, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11399133, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_5-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 6900289, "total_tokens_skipped": 0, "percentiles": {"0th": 30, "10th": 71, "20th": 80, "30th": 88, "40th": 97, "50th": 108, "60th": 122, "70th": 143, "80th": 175, "90th": 237, "95th": 309, "99th": 515, "100th": 7768}}
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_5-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 28917320, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 11417179, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 6865743, "total_tokens_skipped": 0, "percentiles": {"0th": 30, "10th": 71, "20th": 80, "30th": 88, "40th": 97, "50th": 108, "60th": 122, "70th": 142, "80th": 173, "90th": 237, "95th": 307, "99th": 516, "100th": 2758}}
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 28962242, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 11444867, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_7-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 28917320, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_7-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 11417179, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_5-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 29055484, "hashes": {}}, "samples": 50000, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_5-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 11399133, "hashes": {}}}]}
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 20658277
+ }
train/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,12 @@
+ {
+ "total_tokens": 20658277,
+ "target_tokens": 15000000,
+ "num_unique_folders": 3,
+ "num_total_folders_copied": 3,
+ "num_upsampled_folders": 0,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_5-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/jpn_Jpan-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_7-tokenized-chunked-8192-512-32-backfill-nodups"
+ ]
+ }
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 32528751, "hashes": {}}, "samples": 21696, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8290817, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 7980804, "total_tokens_skipped": 0, "percentiles": {"0th": 25, "10th": 77, "20th": 98, "30th": 124, "40th": 158, "50th": 203, "60th": 264, "70th": 350, "80th": 495, "90th": 765, "95th": 1097, "99th": 2563, "100th": 8191}}
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 32528751, "hashes": {}}, "samples": 21696, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 8290817, "hashes": {}}}], "version": 2}
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 7980804, "total_tokens_skipped": 0, "percentiles": {"0th": 25, "10th": 77, "20th": 98, "30th": 124, "40th": 158, "50th": 203, "60th": 264, "70th": 350, "80th": 495, "90th": 765, "95th": 1097, "99th": 2563, "100th": 8191}}
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff
 
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/index.json ADDED
@@ -0,0 +1 @@
+ {"version": 2, "shards": [{"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c/shard.00000.mds", "bytes": 32528751, "hashes": {}}, "samples": 21696, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c/shard.00000.mds.zstd", "bytes": 8290817, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint32"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds", "bytes": 32528751, "hashes": {}}, "samples": 21696, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "articles_0-tokenized-chunked-8192-512-32-backfill-nodups/shard.00000.mds.zstd", "bytes": 8290817, "hashes": {}}}]}
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/num_tokens.json ADDED
@@ -0,0 +1,3 @@
+ {
+ "num_tokens": 15961608
+ }
train/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/sampling_summary.json ADDED
@@ -0,0 +1,11 @@
+ {
+ "total_tokens": 15961608,
+ "target_tokens": 15000000,
+ "num_unique_folders": 1,
+ "num_total_folders_copied": 2,
+ "num_upsampled_folders": 1,
+ "copied_folders": [
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups",
+ "/home/oweller2/my_scratch/bert24-data/data/mmbert-ext/multi-wikis-sampled-decay/khk_Cyrl-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups-upsample-67d49d7c"
+ ]
+ }
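khk_Cyrl is the only upsampled language in this batch: a single source folder (num_unique_folders = 1) contributes about 7.98M tokens, short of the 15M-token target, so the folder is copied a second time under an -upsample-<hash> name, which is why its two shard index.json entries above are identical. A rough sketch of that copy-until-target behaviour, with hypothetical helper names, paths, and hash scheme:

```python
# Rough sketch of the copy-until-target behaviour implied by this summary: copy whole
# folders until target_tokens is reached, re-copying folders under a suffixed
# "-upsample-<hash>" name once the unique ones run out. Not the pipeline's own code.
import itertools
import json
import shutil
import uuid
from pathlib import Path

def sample_language(source_folders, target_tokens, out_dir):
    out_dir = Path(out_dir)
    total, copied, upsampled = 0, [], 0
    for i, folder in enumerate(itertools.cycle(source_folders)):
        if total >= target_tokens:
            break
        folder = Path(folder)
        tokens = json.loads((folder / "stats.json").read_text())["total_tokens_written"]
        name = folder.name
        if i >= len(source_folders):              # past the unique folders: upsample
            name += f"-upsample-{uuid.uuid4().hex[:8]}"
            upsampled += 1
        shutil.copytree(folder, out_dir / name)
        copied.append(str(out_dir / name))
        total += tokens
    return {
        "total_tokens": total,
        "target_tokens": target_tokens,
        "num_unique_folders": len(source_folders),
        "num_total_folders_copied": len(copied),
        "num_upsampled_folders": upsampled,
        "copied_folders": copied,
    }
```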
train/multi-wikis-sampled-decay/lvs_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/stats.json ADDED
@@ -0,0 +1 @@
+ {"total_duplicated_tokens": 0, "total_tokens_written": 9689512, "total_tokens_skipped": 0, "percentiles": {"0th": 29, "10th": 66, "20th": 86, "30th": 106, "40th": 129, "50th": 154, "60th": 184, "70th": 221, "80th": 273, "90th": 362, "95th": 461, "99th": 729, "100th": 5979}}
train/multi-wikis-sampled-decay/lvs_Latn-tokenized-chunked-8192-512-32-backfill-nodups-sampled/articles_0-tokenized-chunked-8192-512-32-backfill-nodups/token_decile.json ADDED
The diff for this file is too large to render. See raw diff