Update dlmodels.sh
Browse files- dlmodels.sh +28 -13
dlmodels.sh
CHANGED
|
@@ -9,6 +9,21 @@ fi
|
|
| 9 |
# Set the input file (one "<filepath> <url>" pair per line) from the first argument
INPUT_FILE="$1"
| 11 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 12 |
# Check an existing file against the remote copy and (re-)download if needed.
# Arguments:
#   $1 - destination file path
#   $2 - source URL
# Returns: 0 when the file is verified complete or unverifiable; otherwise the
#          download pipeline's status propagates.
download_file() {
    local filepath="$1"
    local url="$2"   # take the URL as an argument instead of relying on the caller's global

    echo -e "\nChecking file: $filepath"

    # If the file already exists, compare its size with the remote copy
    if [[ -f "$filepath" ]]; then
        local remote_size local_size
        # Headers only, following redirects; when several responses are
        # chained, the last Content-Length is the one for the final resource.
        remote_size=$(curl -sIL -- "$url" \
            | awk 'tolower($1) == "content-length:" { size = $2 } END { print size }' \
            | tr -d '\r')
        # NOTE(review): stat -c%s is GNU coreutils syntax; BSD/macOS stat differs.
        local_size=$(stat -c%s "$filepath")

        # Debug output
        echo "Remote size: ${remote_size:-Unknown}, Local size: $local_size for file $filepath"

        # If the server does not report a usable numeric size, assume complete
        if [[ ! "$remote_size" =~ ^[0-9]+$ ]]; then
            echo -e "[SKIP] Unable to verify size for $filepath. Assuming complete."
            return 0
        fi

        # If the file sizes match, skip re-downloading
        if [[ "$remote_size" == "$local_size" ]]; then
            echo -e "[SKIP] $filepath already exists and is complete."
            return 0
        else
            echo -e "[RE-DOWNLOAD] $filepath is incomplete or corrupted. Re-downloading..."
            rm -f -- "$filepath"   # Remove the corrupted file before re-downloading
        fi
    else
        echo -e "[DOWNLOAD] Starting download of $filepath"
    fi

    # Download the file, condensing wget's dot progress into a single updating
    # line. Pass the filepath via awk -v so special characters cannot break
    # (or inject into) the awk program.
    wget -c --progress=dot -O "$filepath" "$url" 2>&1 \
        | grep --line-buffered "%" \
        | sed -u -e "s,\.,,g" \
        | awk -v fp="$filepath" '{ printf("\r[DOWNLOADING] %s - %s", fp, $2) }'

    # Confirm completion
    echo -e "\n[COMPLETE] Downloaded $filepath"
}
|
| 55 |
|
|
# Read "<filepath> <url>" pairs from the input file and fan out one
# background download per line, then wait for all of them to finish.
while IFS= read -r line; do
    # Skip blank lines so they do not spawn a download_file "" "" job
    [[ -z "$line" ]] && continue

    filepath=$(echo "$line" | awk '{print $1}')
    url=$(echo "$line" | awk '{print $2}')

    # Call the download function in the background for concurrency
    download_file "$filepath" "$url" &
done < "$INPUT_FILE"

# Wait for all background download processes to finish before reporting
wait
echo -e "\nAll downloads are completed."
|
|
|
|
| 9 |
# Set the input file (one "<filepath> <url>" pair per line) from the first argument
INPUT_FILE="$1"
|
| 11 |
|
| 12 |
+
# Resolve the size in bytes of a remote file.
# Arguments:
#   $1 - URL to inspect
# Outputs: the Content-Length (plain positive integer) on stdout, or nothing
#          when the server does not report a usable size; callers treat empty
#          output as "unknown" and skip verification.
get_remote_size() {
    local url="$1"
    local remote_size

    # Headers only (-I), silent (-s), following redirects (-L). When redirects
    # chain several responses together, keep only the LAST Content-Length —
    # a plain grep -i would match every intermediate response too.
    remote_size=$(curl -sIL -- "$url" \
        | awk 'tolower($1) == "content-length:" { size = $2 } END { print size }' \
        | tr -d '\r')

    # Emit only a clean numeric value. The previous fallback of downloading a
    # 0-1 byte range and counting bytes returned the probe's size (~2 bytes),
    # not the file's size, which made every size comparison fail and forced a
    # re-download on each run — so no fallback: unknown stays empty.
    if [[ "$remote_size" =~ ^[0-9]+$ ]]; then
        echo "$remote_size"
    fi
}
|
| 26 |
+
|
| 27 |
# Check an existing file against the remote copy and (re-)download if needed.
# Arguments:
#   $1 - destination file path
#   $2 - source URL
# Returns: 0 when the file is verified complete or unverifiable; otherwise the
#          download pipeline's status propagates.
download_file() {
    local filepath="$1"
    local url="$2"   # take the URL as an argument instead of relying on the caller's global

    echo -e "\nChecking file: $filepath"

    # If file already exists, verify integrity by comparing sizes
    if [[ -f "$filepath" ]]; then
        local remote_size local_size
        # Get remote file size using the get_remote_size helper
        remote_size=$(get_remote_size "$url")
        # NOTE(review): stat -c%s is GNU coreutils syntax; BSD/macOS stat differs.
        local_size=$(stat -c%s "$filepath")

        # Debug output to confirm size checks
        echo "Remote size: ${remote_size:-Unknown}, Local size: $local_size for file $filepath"

        # Skip verification when the remote size cannot be determined;
        # a strict numeric check also guards the comparison below against
        # stray header text reaching us.
        if [[ ! "$remote_size" =~ ^[0-9]+$ ]]; then
            echo -e "[SKIP] Unable to verify size for $filepath. Assuming complete."
            return 0
        fi

        # If the file sizes match, skip re-downloading
        if [[ "$remote_size" == "$local_size" ]]; then
            echo -e "[SKIP] $filepath already exists and is complete."
            return 0
        else
            echo -e "[RE-DOWNLOAD] $filepath is incomplete or corrupted. Re-downloading..."
            rm -f -- "$filepath"   # Remove the corrupted file before re-downloading
        fi
    else
        echo -e "[DOWNLOAD] Starting download of $filepath"
    fi

    # Download the file using wget, condensing its dot progress into a single
    # updating line. Pass the filepath via awk -v so special characters cannot
    # break (or inject into) the awk program.
    wget -c --progress=dot -O "$filepath" "$url" 2>&1 \
        | grep --line-buffered "%" \
        | sed -u -e "s,\.,,g" \
        | awk -v fp="$filepath" '{ printf("\r[DOWNLOADING] %s - %s", fp, $2) }'

    # Confirm download completion
    echo -e "\n[COMPLETE] Downloaded $filepath"
}
|
| 70 |
|
|
|
|
| 74 |
filepath=$(echo "$line" | awk '{print $1}')
|
| 75 |
url=$(echo "$line" | awk '{print $2}')
|
| 76 |
|
| 77 |
+
# Call the download function for each file in the list
|
| 78 |
download_file "$filepath" "$url" &
|
| 79 |
done < "$INPUT_FILE"
|
| 80 |
|
| 81 |
+
# Wait for all background download processes to finish
|
| 82 |
wait
|
| 83 |
echo -e "\nAll downloads are completed."
|