orionweller committed
Commit
538c27d
1 Parent(s): bf6cf44

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See the raw diff for the full change set.
Files changed (50)
  1. .gitattributes +34 -0
  2. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00000.mds +3 -0
  3. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds +3 -0
  4. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00002.mds +3 -0
  5. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds +3 -0
  6. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds +3 -0
  7. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00005.mds +3 -0
  8. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds +3 -0
  9. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds +3 -0
  10. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds +3 -0
  11. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds +3 -0
  12. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds +3 -0
  13. train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00012.mds +3 -0
  14. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds +3 -0
  15. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds +3 -0
  16. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds +3 -0
  17. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds +3 -0
  18. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds +3 -0
  19. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds +3 -0
  20. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00015.mds +3 -0
  21. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00018.mds +3 -0
  22. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00020.mds +3 -0
  23. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00022.mds +3 -0
  24. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00023.mds +3 -0
  25. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00024.mds +3 -0
  26. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00025.mds +3 -0
  27. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00027.mds +3 -0
  28. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00029.mds +3 -0
  29. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00032.mds +3 -0
  30. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00034.mds +3 -0
  31. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00035.mds +3 -0
  32. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00036.mds +3 -0
  33. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00037.mds +3 -0
  34. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00038.mds +3 -0
  35. train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00039.mds +3 -0
  36. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_11310-tokenized-chunked-1024-512-128-backfill-nodups/index.json +1 -0
  37. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_11310-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json +4 -0
  38. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_14196-tokenized-chunked-1024-512-128-backfill-nodups/index.json +1 -0
  39. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_14196-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json +4 -0
  40. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_16175-tokenized-chunked-1024-512-128-backfill-nodups/index.json +1 -0
  41. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_16175-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json +4 -0
  42. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20886-tokenized-chunked-1024-512-128-backfill-nodups/index.json +1 -0
  43. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20886-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json +4 -0
  44. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21424-tokenized-chunked-1024-512-128-backfill-nodups/index.json +1 -0
  45. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21424-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json +4 -0
  46. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23938-tokenized-chunked-1024-512-128-backfill-nodups/index.json +1 -0
  47. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23938-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json +4 -0
  48. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30754-tokenized-chunked-1024-512-128-backfill-nodups/index.json +1 -0
  49. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30754-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json +4 -0
  50. train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_32921-tokenized-chunked-1024-512-128-backfill-nodups/index.json +1 -0
.gitattributes CHANGED
@@ -22210,3 +22210,37 @@ train/algebraic-stack/algebraic_stack_train_0008-tokenized-chunked-1024-512-128-
 train/algebraic-stack/algebraic_stack_train_0008-tokenized-chunked-1024-512-128-backfill-nodups/shard.00002.mds filter=lfs diff=lfs merge=lfs -text
 train/algebraic-stack/algebraic_stack_train_0008-tokenized-chunked-1024-512-128-backfill-nodups/shard.00015.mds filter=lfs diff=lfs merge=lfs -text
 train/algebraic-stack/algebraic_stack_train_0008-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00022.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00027.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00034.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00038.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00029.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00036.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00032.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00035.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00037.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00023.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00015.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00020.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00039.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00024.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00018.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00025.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00000.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00005.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00002.mds filter=lfs diff=lfs merge=lfs -text
+ train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00012.mds filter=lfs diff=lfs merge=lfs -text
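The added .gitattributes entries above route each new shard through the Git LFS filter, so the repository itself stores only pointer stubs for them. A minimal sketch (plain Python, standard library only, helper name illustrative) of how such entries can be parsed to check which committed paths are LFS-tracked:

```python
from pathlib import Path

def lfs_tracked_paths(gitattributes_path: str = ".gitattributes") -> set[str]:
    """Collect the path patterns whose attributes route them through the Git LFS filter."""
    tracked = set()
    for line in Path(gitattributes_path).read_text().splitlines():
        line = line.strip()
        if not line or line.startswith("#"):
            continue
        # Each entry is "<pattern> <attr> <attr> ...", e.g. "... filter=lfs diff=lfs merge=lfs -text".
        pattern, *attrs = line.split()
        if "filter=lfs" in attrs:
            tracked.add(pattern)
    return tracked

# Example: the shard paths added in this hunk should all appear in the returned set.
```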
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00000.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:886b11a3935945c6f689a3129b54e48229c178fc21e3c7eb439d90f806c33f1a
+ size 67108730
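Each shard file in this commit is stored as a Git LFS pointer like the one above: a version line, the oid sha256 of the real blob, and its byte size. A minimal verification sketch (plain Python, function names illustrative) for checking a downloaded shard against its pointer:

```python
import hashlib

def parse_lfs_pointer(text: str) -> dict:
    """Split the 'version', 'oid', and 'size' lines of a Git LFS pointer into a dict."""
    return dict(line.split(" ", 1) for line in text.strip().splitlines())

def verify_shard(pointer_text: str, shard_path: str) -> bool:
    """Compare a downloaded shard against the sha256 and byte size recorded in its pointer."""
    fields = parse_lfs_pointer(pointer_text)
    expected_hash = fields["oid"].removeprefix("sha256:")
    expected_size = int(fields["size"])
    digest, size = hashlib.sha256(), 0
    with open(shard_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            digest.update(chunk)
            size += len(chunk)
    return digest.hexdigest() == expected_hash and size == expected_size
```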
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e594c127c276c198ef3952b7f1426e5a1cfa72039bd661113fa7bdc0d43d0733
+ size 67106937
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00002.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:82c2a6605394668f6ebd80436f5743099cbeee29cba8d5f2103a1d088d0f6e63
+ size 67108119
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:73122bd530d98d2034a3e692e0f8ca2ba4e7e5837a1124a234642464dff5d480
+ size 67107288
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f4ae29cfb30f8069f5883dce0c0696d27bfabfe0a357d21d8b5e5dcf54097c6
+ size 67107155
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00005.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f49792bfa549758e3628e4a9f159d7e5807c2ddab1b4d375af99cb203439ef5
+ size 67108120
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11762515a3977e4bbe54f7680e0fbaf3c44a1e1418039401ddd4b8387416ec9f
+ size 67108600
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00007.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8d39f84ed69bd7b94f605d4074a76665881bdef205312ad9f972e701713057c7
+ size 67108484
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3690acc1e93497a604b3b7bf063c938dd14cc2b4a3b89b986a797bd03a3d071c
+ size 67107776
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00009.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4330d8a79fcc891f6e37b99e59f1c93fda31af26639e338ac8552ec05aeef8b5
+ size 67107830
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fdeb58f1f5f1ad0470d58894db9af433459650f893a0e56fe17f183c7cb94ea8
+ size 67107371
train/algebraic-stack/algebraic_stack_train_0013-tokenized-chunked-1024-512-128-backfill-nodups/shard.00012.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1bf23f57f8753f5fc6c9b0aa7580d0f941d9c520b69d46bf604f3f42d433040
+ size 17194487
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00001.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6425def60ea7612b2c6e45a0a7c90778ae6fde79194362de30ae16684c0cffc3
+ size 67108356
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00003.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6c70d57dc4f83bc70549f563868c1b1f80e256469e05fa3b67738827102b0d9a
+ size 67108312
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00004.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3ac9112db16f928b6d5513f901bf91e9b79b75e3ccfb7fb634880cb3bd5f3a6d
+ size 67107304
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00006.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4e7cc780a0906632a48627579696a394e489721b3f25a30a14b4ac335b69c665
+ size 67108489
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00008.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7f27d1953f1b6a115a9a007bb6ef0ff92d50dc6ebd9f3540b9c2ef3a5179a14e
+ size 67106996
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00010.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5b00fcfb332a8fdf7e277e671db0d12bc1e21a61a35d468dc81cf222f66067ce
+ size 67107314
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00015.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6fc6d1bacf6310082b98fe4ef6399f520119f2b24e2cea19c983b78d0c5ee0b2
+ size 67108649
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00018.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08377028ac16e8c687918d302ff617160a07570826554f1138e3f6c9c0de71cb
+ size 67107614
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00020.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc98cef7ce5a1e6a14d6781a5bc1c4c04b72e06dbaa3d739fea996decef5ad61
+ size 67107750
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00022.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:46de61dda2993c94c7d0a4fc43ce61fbe823ff807d9697f178dbcf7c607a903f
+ size 67107279
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00023.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1f18760f72b46140473967f4790dee004911d5d22260a8f454e8c61413735ce9
+ size 67107523
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00024.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dea6f3d3dd4be8923000ebc6ee4c580110e5b8baf660146e0f3a8e50aaaaaa83
+ size 67108147
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00025.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2aaff361ef59f4e3b82514c7e4b3778aa1ec366380e08bbe904db1f4b702446f
+ size 67107481
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00027.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e94b68cb3cccfa970d9c1b23b52561474521e57e8a399af20067936db70cc17d
+ size 67107141
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00029.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:821b6c7ff06b7b7aefedcdff018c3bcf3b84559345b5aa750c32a77a264ab59d
+ size 67108160
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00032.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:23774e884508d87b6f7fe4121f594992d539b6c8f0eaa826d0840af3d894c0e0
+ size 67108445
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00034.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2dc57a075478785c07c7d2dbd156b7ccb19bab3ae4036139b18962a582485854
+ size 67107678
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00035.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6c6d7ffcc1c6deb03247f8ebd69f3496d1347121bdc19bc3262ff99550ad93a
+ size 67108838
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00036.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7feaef4fe47827ae9122b54c6e068d74d736f56dc2bc6bbd6e0026b1a566377
+ size 67108144
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00037.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:09b960509458b284801193a39f1696d7dfa88dbe30c8aad365c1c3fef3f0f473
+ size 67108512
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00038.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:688700c2964a1b5e7b27765872aa70c99f3f5eed106e18b310a842e7ce736593
+ size 67107840
train/algebraic-stack/algebraic_stack_train_0015-tokenized-chunked-1024-512-128-backfill-nodups/shard.00039.mds ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b7c8dda80a7c83ebc254347300b430bfc8bd8c69d775e94228ce5e4e99e6c43a
+ size 67106796
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_11310-tokenized-chunked-1024-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108847, "hashes": {}}, "samples": 44414, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47923978, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 9765896, "hashes": {}}, "samples": 6600, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 6978000, "hashes": {}}}], "version": 2}
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_11310-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "num_tokens": 36858733,
+ "num_truncated_tokens": 36830118
+ }
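The index.json above describes two zstd-compressed MDS shards with columns "id" (str) and "input_ids" (uint16 token ids), and num_tokens.json records the split's token counts. A minimal sketch of reading such a split with the mosaicml-streaming package ("streaming"), assuming the shards have already been pulled out of LFS into the split directory; whether the recomputed total should match num_tokens or num_truncated_tokens is not specified by this commit, so the comparison is only illustrative:

```python
import json
from streaming import StreamingDataset  # pip install mosaicml-streaming

split_dir = ("train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/"
             "split_11310-tokenized-chunked-1024-512-128-backfill-nodups")

# Iterate the shards listed in index.json; each sample is a dict keyed by the
# declared column names ("id" and "input_ids").
dataset = StreamingDataset(local=split_dir, shuffle=False)

total_tokens = sum(len(sample["input_ids"]) for sample in dataset)

with open(f"{split_dir}/num_tokens.json") as f:
    recorded = json.load(f)

print(f"recomputed: {total_tokens}, recorded num_tokens: {recorded['num_tokens']}")
```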
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_14196-tokenized-chunked-1024-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108502, "hashes": {}}, "samples": 43155, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47926464, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 17559399, "hashes": {}}, "samples": 11236, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 12557087, "hashes": {}}}], "version": 2}
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_14196-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "num_tokens": 40650349,
+ "num_truncated_tokens": 40616018
+ }
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_16175-tokenized-chunked-1024-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108849, "hashes": {}}, "samples": 44010, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 48109332, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 12373959, "hashes": {}}, "samples": 8109, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8879910, "hashes": {}}}], "version": 2}
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_16175-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "num_tokens": 38128397,
+ "num_truncated_tokens": 38097601
+ }
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20886-tokenized-chunked-1024-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108652, "hashes": {}}, "samples": 43827, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47952179, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 13524754, "hashes": {}}, "samples": 8751, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 9713251, "hashes": {}}}], "version": 2}
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_20886-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "num_tokens": 38689349,
+ "num_truncated_tokens": 38658447
+ }
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21424-tokenized-chunked-1024-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107898, "hashes": {}}, "samples": 45050, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47940364, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 7012272, "hashes": {}}, "samples": 4683, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 5019622, "hashes": {}}}], "version": 2}
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_21424-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "num_tokens": 35521211,
+ "num_truncated_tokens": 35494279
+ }
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23938-tokenized-chunked-1024-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67107211, "hashes": {}}, "samples": 43888, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47799967, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 11917332, "hashes": {}}, "samples": 7753, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 8492701, "hashes": {}}}], "version": 2}
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_23938-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "num_tokens": 37913924,
+ "num_truncated_tokens": 37884447
+ }
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30754-tokenized-chunked-1024-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108056, "hashes": {}}, "samples": 42912, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47865326, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 19753908, "hashes": {}}, "samples": 12574, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 14007784, "hashes": {}}}], "version": 2}
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_30754-tokenized-chunked-1024-512-128-backfill-nodups/num_tokens.json ADDED
@@ -0,0 +1,4 @@
+ {
+ "num_tokens": 41713457,
+ "num_truncated_tokens": 41677443
+ }
train/mlfoundations-dclm-baseline-1.0-parquet-sampled-v3/split_32921-tokenized-chunked-1024-512-128-backfill-nodups/index.json ADDED
@@ -0,0 +1 @@
+ {"shards": [{"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00000.mds", "bytes": 67108046, "hashes": {}}, "samples": 42528, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00000.mds.zstd", "bytes": 47325820, "hashes": {}}}, {"column_encodings": ["str", "ndarray:uint16"], "column_names": ["id", "input_ids"], "column_sizes": [null, null], "compression": "zstd", "format": "mds", "hashes": [], "raw_data": {"basename": "shard.00001.mds", "bytes": 21604636, "hashes": {}}, "samples": 13733, "size_limit": 67108864, "version": 2, "zip_data": {"basename": "shard.00001.mds.zstd", "bytes": 15279818, "hashes": {}}}], "version": 2}