Rewrite wcurl to make it less bash-dependent

Signed-off-by: Sergio Durigan Junior <sergiodj@debian.org>
This commit is contained in:
Sergio Durigan Junior 2024-06-29 15:48:39 -04:00
parent 8c1e021e38
commit f671caef32
Signed by untrusted user who does not match committer: sergiodj
GPG key ID: D0EB762865FC5E36

179
wcurl
View file

@@ -1,96 +1,103 @@
#!/bin/bash
# wcurl - a simple wrapper around curl to easily download files.
# version: 2024-06-26
#
# Copyright (C) Samuel Henrique, <samueloph@debian.org>.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice (including the next
# paragraph) shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# SPDX-License-Identifier: MIT
# Abort on the first command that fails.  NOTE(review): the diff render fused
# the old `print_help` header with this line; the new side is just `set -e`.
set -e
# Print the help output (program description, usage line and options) to
# stdout.  Does not exit; callers decide the exit status.
usage() {
	cat << _EOF_
$0 -- a simple wrapper around curl to easily download files.

Usage: $0 [-o <CURL_OPTIONS>|--opts=<CURL_OPTIONS>] <URL>...

Options:
  -o,--opts <CURL_OPTIONS>: Specify extra options to be
                            passed when invoking curl.

  <URL>: The URL to be downloaded. May be specified more than once.
_EOF_
}
# Print an error message to stderr and abort the script with status 1.
# Arguments: the message words to print (joined with a single space).
error() {
	# ">&2" instead of "> /dev/stderr": equivalent, but works even where
	# /dev/stderr is absent (some chroots/BSDs) and needs no extra open().
	printf "%s\n" "$*" >&2
	exit 1
}
# Normalize the command line with util-linux getopt(1); unlike the getopts
# builtin it understands long options.  getopt re-quotes its output, which is
# why it must be re-read with `eval set --`.
OPTS=$(getopt --options "o:h" --longoptions "opts:,help" --name wcurl -- "$@")

eval set -- "${OPTS}"

# Extra curl options.
# FIXME: Should this really be per-URL?
CURL_OPTIONS=""

# The URLs to be downloaded.
URLS=""

# Set this to "--parallel" if there's more than one URL to download.
CURL_PARALLEL=""

# Parameters to be passed for each URL.
PER_URL_PARAMETERS="--location --remote-name --remote-time --retry 10 --retry-max-time 10 --continue-at - "
# Sanitize parameters.
# Validates that at least one URL was given, folds any user-supplied curl
# options into the per-URL parameter list, and freezes the configuration
# globals so nothing mutates them afterwards.
# Globals: URLS, CURL_OPTIONS (read); PER_URL_PARAMETERS (read/write);
#          all four made readonly on success.
sanitize()
{
	if [ -z "${URLS}" ]; then
		error "You must provide at least one URL to download."
	fi

	if [ -n "${CURL_OPTIONS}" ]; then
		PER_URL_PARAMETERS="${PER_URL_PARAMETERS} ${CURL_OPTIONS} "
	fi

	readonly CURL_OPTIONS URLS PER_URL_PARAMETERS CURL_PARALLEL
}
# Execute curl with the list of URLs provided by the user.
# Builds a single curl command line: the per-URL parameters are repeated for
# every URL, with "--next" separating each download after the first.
# Globals: URLS, CURL_PARALLEL, PER_URL_PARAMETERS (read).
exec_curl()
{
	# Intentionally unquoted: URLS is a space-separated list assembled by
	# the argument parser (spaces inside URLs were %20-encoded there).
	set -- $URLS

	# We can't use --next for the first URL.
	CMD="curl ${CURL_PARALLEL} ${PER_URL_PARAMETERS} ${1} "
	shift
	for url in "$@"; do
		CMD="${CMD} --next ${PER_URL_PARAMETERS} ${url}"
	done

	# BUGFIX: the previous version read `echo exec $CMD`, a debugging
	# leftover that only printed the command instead of downloading.
	# Word-splitting of CMD is intentional here.
	exec $CMD
}
# Walk the getopt-normalized argument list.  Options accumulate into
# CURL_OPTIONS; everything after "--" is the URL list.  Unrecognized tokens
# cannot appear here because getopt(1) already rejected them.
while [ -n "${1}" ]; do
	case "${1}" in
		"-o"|"--opts")
			shift
			CURL_OPTIONS="${CURL_OPTIONS} ${1}"
			;;

		"-h"|"--help")
			usage
			exit 0
			;;

		"--")
			# This is the start of the list of URLs.
			shift

			# Only enable --parallel for more than one URL, since
			# some curl options are incompatible with it.
			if [ "$#" -gt 1 ]; then
				CURL_PARALLEL="--parallel"
			fi

			for url in "$@"; do
				# Percent-encode whitespace so the URL survives
				# the space-separated URLS list.
				newurl=$(printf "%s\n" "${url}" | sed 's/ /%20/g')
				URLS="${URLS} ${newurl}"
			done
			break
			;;
	esac
	shift
done
# All arguments parsed: validate the collected state, then replace this
# process with the generated curl invocation.
sanitize
exec_curl
# are appending any URLs after the first one, we need to also make use of the
# "--next" parameter.
# Only set '--parallel' if there's more than one URL. This increases the
# compatibility with other parameters that can be passed through -o/--opts
# since some might not work together with --parallel.
if (( ${#urls_to_download[@]} > 1 )); then
declare -r command_to_exec_init="curl --parallel"
else
declare -r command_to_exec_init="curl"
fi
command_to_exec="$command_to_exec_init"
declare -r per_url_parameters="--location --remote-name --remote-time \
--retry 10 --retry-max-time 10 $curl_opts --continue-at -"
# If we have URLs to download.
if (( ${#urls_to_download[@]} > 0 )); then
for url in "${urls_to_download[@]}"; do
# If this is the first command we've added, don't prepend "--next" to it.
if [[ "$command_to_exec" == "$command_to_exec_init" ]]; then
command_to_exec="$command_to_exec $per_url_parameters $url"
else
command_to_exec="$command_to_exec --next $per_url_parameters $url"
fi
done
fi
# Call curl with the generated parameters.
exec $command_to_exec