Datasets:
LEAP
/

ArXiv:
License:
File size: 1,009 Bytes
9543638
266a225
 
5dd4378
266a225
 
 
 
 
5dd4378
266a225
 
5dd4378
 
 
 
 
266a225
 
 
9584e31
5dd4378
266a225
 
5dd4378
 
 
 
 
 
 
 
 
 
 
 
 
 
266a225
5dd4378
 
266a225
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
#!/bin/bash
# Download, reassemble, and extract a chunked ChaosBench dataset tarball.
#
# Usage: download.sh <dataset_name> [with_pigz]
#   $1 - dataset subdirectory on the HuggingFace hub; chunks are fetched from
#        https://huggingface.co/datasets/LEAP/ChaosBench/resolve/main/$1/
#   $2 - optional; pass "with_pigz" to decompress in parallel with pigz
#
# Fail fast: abort on any unchecked error, unset variable, or pipeline failure
# so a bad download/extract never reaches the rm/mv cleanup steps below.
set -euo pipefail

if [[ $# -lt 1 || -z "$1" ]]; then
    echo "Usage: $0 <dataset_name> [with_pigz]" >&2
    exit 2
fi

# Base URL for the chunked files
readonly BASE_URL="https://huggingface.co/datasets/LEAP/ChaosBench/resolve/main/$1/"

# The base name of the files (without extension)
readonly BASE_NAME="$1_chunks"

# Extension for the chunked files
readonly EXTENSION="tar.gz"

# Download all chunked files. Chunks carry split(1)-style suffixes
# (.aa, .ab, ...); the first missing chunk signals the end of the set,
# so a failed wget exits both loops. Remove any partial file the failed
# fetch may have left so it cannot be concatenated into the archive.
for prefix in {a..z}; do
    for suffix in {a..z}; do
        FILE_NAME="${BASE_NAME}.${EXTENSION}.${prefix}${suffix}"
        if ! wget "${BASE_URL}${FILE_NAME}"; then
            rm -f -- "$FILE_NAME"
            break 2
        fi
    done
done

# Combine the chunked files; the shell expands the glob in sorted order,
# which matches the split order of the suffixes.
echo "COMBINING CHUNKS, THIS MAY TAKE A WHILE..."
cat "${BASE_NAME}.${EXTENSION}".* > "$1.tar.gz"

# Remove the chunked files
rm -- "${BASE_NAME}.${EXTENSION}".*

# Extract the combined file
echo "EXTRACTING FOLDER, THIS MAY TAKE A WHILE..."
if [[ "${2:-}" == "with_pigz" ]]; then
    echo "Use pigz for parallel decompression..."
    pigz -dc "$1.tar.gz" | tar -xf -
else
    # Use standard tar for decompression
    tar -xzf "$1.tar.gz"
fi

# Remove the combined compressed file
rm -- "$1.tar.gz"

# Rename folder — the archive unpacks to "<name>_tmp" (per the upstream
# packaging); expose it under the dataset name the caller asked for.
mv -- "$1_tmp" "$1"