wcurl/wcurl
Samuel Henrique 314671543a Download in parallel and use remote timestamp
There's something weird going on with --progress-bar, it was working
 before but I could not figure out which change broke it.
2024-06-26 22:44:43 +01:00

89 lines
3.5 KiB
Bash
Executable file

#!/bin/bash
# wcurl - a simple wrapper around curl for easily downloading files.
# version: 2024-05-14
#
# Copyright (C) Samuel Henrique, <samueloph@debian.org>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# SPDX-License-Identifier: MIT
# Print a short usage summary; full documentation lives in the man page.
print_help() {
    printf '%s\n' \
        'Usage: wcurl [-o|--opts=<CURL_OPTIONS>...] <URL>...' \
        'For all options use the manual: man wcurl'
}
# Initialize array which stores list of encoded URLs.
declare -a urls_to_download=()

# Without any arguments there is nothing to do: print the help text to guide
# the user and exit with an error status.
if (( $# == 0 )); then
    echo "No arguments provided, here's the help output:" >&2
    print_help
    exit 1
fi
# Parse command-line arguments: collect options to forward to curl and encode
# the URLs to be downloaded.
# Globals written: curl_opts        - options forwarded verbatim to curl
#                  urls_to_download - array of whitespace-encoded URLs
# Exits: 0 after printing help on -h/--help, 1 on an unknown parameter.
parse_arguments() {
    local argument
    for argument in "$@"; do
        # Check if argument is a parameter, we need to pass those to curl.
        if [[ $argument == -o=* ]] || [[ $argument == --opts=* ]]; then
            curl_opts="${argument#*=}"
        # Show help output on -h|--help.
        elif [[ $argument == "-h" ]] || [[ $argument == "--help" ]]; then
            print_help
            exit 0
        # Unknown parameter provided.
        elif [[ $argument == -* ]]; then
            # Diagnostics belong on stderr; message typo ("Unsuported") fixed.
            echo "Unsupported parameter provided, only -o= and --opts= are supported: $argument" >&2
            exit 1
        # If it's not a parameter, assume it's an URL.
        else
            # Encode every whitespace as %20 so curl receives a valid URL.
            # Fixes the original '${argument/ /%20/}', which replaced only the
            # FIRST space and appended a stray '/' after it.
            urls_to_download+=("${argument// /%20}")
        fi
    done
}

parse_arguments "$@"
# The init read-only variable is used below in the for loop to check if the url
# being appended to the command is the first one or not. That's because once we
# are appending any URLs after the first one, we need to also make use of the
# "--next" parameter.
declare -r command_to_eval_init="curl --parallel"
command_to_eval="$command_to_eval_init"

# Parameters applied to every URL: follow redirects, keep the remote file name
# and timestamp, retry transient failures, and resume partial downloads.
# $curl_opts is deliberately word-split later by eval so a single -o/--opts
# argument can carry several curl options.
declare -r per_url_parameters="--location --remote-name --remote-time --retry 10 --retry-max-time 10 $curl_opts --continue-at -"

# If we have URLs to download.
if (( ${#urls_to_download[@]} != 0 )); then
    for url in "${urls_to_download[@]}"; do
        # Shell-quote the URL before it goes through eval; otherwise shell
        # metacharacters inside the URL (e.g. '&' in a query string) would be
        # interpreted — '&' would background the command mid-URL.
        quoted_url=$(printf '%q' "$url")
        # If this is the first command we've added, don't prepend "--next" to it.
        if [[ "$command_to_eval" == "$command_to_eval_init" ]]; then
            command_to_eval="$command_to_eval $per_url_parameters $quoted_url"
        else
            command_to_eval="$command_to_eval --next $per_url_parameters $quoted_url"
        fi
    done
fi

# Print command_to_eval if the debug environment variable is set.
[[ -z "${WCURL_DEBUG}" ]] || echo "$command_to_eval"

# Call curl with the generated parameters.
eval "$command_to_eval"