Download in parallel and use remote timestamp
There's something weird going on with --progress-bar: it was working before, but I could not figure out which change broke it.
parent 9ad03eec9d
commit 314671543a
2 changed files with 31 additions and 11 deletions
wcurl (34 changes)
@@ -62,12 +62,28 @@ do
     fi
 done
 
-# Download URLs one by one.
-# It's not possible to download them in parallel due to the way "--continue-at -"
-# works.
-# Paralelism can be achieved by the way the user invokes wcurl.
-for url in "${urls_to_download[@]}"; do
-    # shellcheck disable=SC2086
-    curl --location --remote-name --retry 10 --retry-max-time 10 $curl_opts \
-        --continue-at - "$url"
-done
+# The init read-only variable is used below in the for loop to check if the url
+# being appended to the command is the first one or not. That's because once we
+# are appending any URLs after the first one, we need to also make use of the
+# "--next" parameter.
+declare -r command_to_eval_init="curl --parallel"
+command_to_eval="$command_to_eval_init"
+declare -r per_url_parameters="--location --remote-name --remote-time --retry 10 --retry-max-time 10 $curl_opts --continue-at -"
+
+# If we have URLs to download.
+if (( ${#urls_to_download[@]} != 0 )); then
+    for url in "${urls_to_download[@]}"; do
+        # If this is the first command we've added, don't prepend "--next" to it.
+        if [[ "$command_to_eval" == "$command_to_eval_init" ]]; then
+            command_to_eval="$command_to_eval $per_url_parameters $url"
+        else
+            command_to_eval="$command_to_eval --next $per_url_parameters $url"
+        fi
+    done
+fi
+
+# Print command_to_eval if the debug environment variable is set.
+[[ -z "${WCURL_DEBUG}" ]] || echo "$command_to_eval"
+
+# Call curl with the generated parameters.
+eval "$command_to_eval"
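For two URLs, the loop above therefore builds a single curl invocation roughly like the sketch below (assuming an empty $curl_opts; the file names reuse the manpage examples and are purely illustrative). Setting the WCURL_DEBUG environment variable to any non-empty value makes wcurl print the generated command before running it:

    WCURL_DEBUG=1 wcurl example.com/filename1.txt example.com/filename2.txt
    # roughly expands to, and then evals:
    curl --parallel \
        --location --remote-name --remote-time --retry 10 --retry-max-time 10 --continue-at - example.com/filename1.txt \
        --next \
        --location --remote-name --remote-time --retry 10 --retry-max-time 10 --continue-at - example.com/filename2.txt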
wcurl.1 (8 changes)
@@ -19,7 +19,9 @@ parameters via the \fB\-o/\-\-opts\fR option.
 .TP
 By default, \fBwcurl\fR will:
 .br
-\[bu] Encode whitespaces;
+\[bu] Encode whitespaces in URLs;
+.br
+\[bu] Download multiple URLs in parallel;
 .br
 \[bu] Follow redirects;
 .br
@@ -28,6 +30,8 @@ By default, \fBwcurl\fR will:
 \[bu] Perform retries;
 .br
 \[bu] Resume from broken/interrupted downloads.
+.br
+\[bu] Set the downloaded file timestamp to the value provided by the server, if available;
 .SH OPTIONS
 .TP
 \fB\-o, \-\-opts=\fI<CURL_OPTIONS>\fR...\fR
@@ -46,7 +50,7 @@ Download a single file:
 .br
 \fBwcurl example.com/filename.txt\fR
 .PP
-Download two files:
+Download two files in parallel:
 .br
 \fBwcurl example.com/filename1.txt example.com/filename2.txt\fR
 .PP
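Since the manpage now documents the remote-timestamp behaviour, one quick way to check it is to compare the server's Last-Modified header with the downloaded file's mtime. This is only a hedged sketch, not part of the change; the URL is the manpage's illustrative example and GNU ls is assumed for --time-style:

    wcurl example.com/filename.txt
    # Show the timestamp the server advertises for the file.
    curl -sI example.com/filename.txt | grep -i '^last-modified:'
    # With --remote-time in effect, the local mtime should match that header.
    ls -l --time-style=full-iso filename.txt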