Add configuration options for retry on connection refused, number of times to attempt to download a file, and timeout for wget invocation. Thanks to Tim Sarbin for the patch, slightly modified by me to accommodate a second set of wget calls.
This commit is contained in:
parent
0873ccc013
commit
de63126155
3 changed files with 47 additions and 12 deletions
24
Config.in
24
Config.in
|
@ -746,6 +746,30 @@ menu "--- General Settings"
|
|||
# Any missing file will be downloaded and archived here,
|
||||
# if the user has the right privileges.
|
||||
|
||||
config RETRYSRCDOWNLOAD
|
||||
bool "Retry on 'connection refused' failure"
|
||||
default n
|
||||
depends GETPKG
|
||||
help
|
||||
#-- Attempt to download a source package again if it fails
|
||||
# with a 'connection refused' error. This can happen on
|
||||
# servers that are overloaded.
|
||||
|
||||
config RETRYDOWNLOADCNT
|
||||
int "Number of retry attempts on download failures"
|
||||
default 20
|
||||
depends GETPKG
|
||||
help
|
||||
#-- Number of times to retry a failed download.
|
||||
|
||||
config DOWNLOADTIMEOUT
|
||||
int "Download timeout (in seconds)"
|
||||
default 30
|
||||
depends GETPKG
|
||||
help
|
||||
#-- Number of seconds to wait for a download to start before
|
||||
# timing out.
|
||||
|
||||
config SERVER
|
||||
string "FTP mirror"
|
||||
default "ftp://ftp.lfs-matrix.net"
|
||||
|
|
|
@ -7,7 +7,14 @@
|
|||
get_sources() { # Download file, write name to MISSING_FILES.DMP if an error
|
||||
#----------------------------#
|
||||
local saveIFS=$IFS
|
||||
local IFS line URL1 URL2 FILE BOOKMD5 MD5 HAVEMD5 fromARCHIVE
|
||||
local IFS line URL1 URL2 FILE BOOKMD5 MD5 HAVEMD5 fromARCHIVE WGETPARAM
|
||||
|
||||
WGETPARAM=""
|
||||
if [[ "${RETRYSRCDOWNLOAD}" = "y" ]] ; then
|
||||
WGETPARAM+="--retry-connrefused"
|
||||
fi
|
||||
WGETPARAM+=" --tries ${RETRYDOWNLOADCNT}"
|
||||
WGETPARAM+=" --timeout ${DOWNLOADTIMEOUT}"
|
||||
|
||||
# Test if the packages must be downloaded
|
||||
[ ! "$GETPKG" = "y" ] && return
|
||||
|
@ -68,7 +75,7 @@ get_sources() { # Download file, write name to MISSING_FILES.DMP if
|
|||
fromARCHIVE=0
|
||||
# If the file does not exist yet in /sources download a fresh one
|
||||
if [ ! -f $FILE ] ; then
|
||||
if ! wget $URL1 && ! wget $URL2 ; then
|
||||
if ! wget $URL1 $WGETPARAM && ! wget $URL2 $WGETPARAM ; then
|
||||
gs_wrt_message "$FILE not found in the SRC_ARCHIVE or on any server..SKIPPING"
|
||||
continue
|
||||
fi
|
||||
|
@ -88,7 +95,7 @@ get_sources() { # Download file, write name to MISSING_FILES.DMP if
|
|||
# Force storage in SRC_ARCHIVE
|
||||
fromARCHIVE=0;
|
||||
# Try to retrieve again the file. Servers in reverse order.
|
||||
if ! wget $URL2 && ! wget $URL1 ; then
|
||||
if ! wget $URL2 $WGETPARAM && ! wget $URL1 $WGETPARAM ; then
|
||||
gs_wrt_message "$FILE not found on the servers.. SKIPPING"
|
||||
continue
|
||||
fi
|
||||
|
|
|
@ -23,7 +23,8 @@ inline_doc
|
|||
local -r BOOK_common="BOOK CUSTOM_TOOLS"
|
||||
local -r BOOK_clfsX="ARCH TARGET"
|
||||
local -r GENERAL_common="LUSER LGROUP LHOME BUILDDIR CLEAN GETPKG SRC_ARCHIVE \
|
||||
SERVER GETKERNEL RUNMAKE"
|
||||
SERVER RETRYSRCDOWNLOAD RETRYDOWNLOADCNT DOWNLOADTIMEOUT \
|
||||
GETKERNEL RUNMAKE"
|
||||
local -r BUILD_chroot="TEST BOMB_TEST STRIP"
|
||||
local -r BUILD_common="FSTAB CONFIG TIMEZONE PAGE LANG INSTALL_LOG"
|
||||
local -r ADVANCED_chroot="COMPARE RUN_ICA RUN_FARCE ITERATIONS OPTIMIZE"
|
||||
|
@ -143,6 +144,9 @@ inline_doc
|
|||
TARGET32) [[ -n "${TARGET32}" ]] && echo -e "`eval echo $PARAM_VALS`" ;;
|
||||
MIPS_LEVEL) [[ "${ARCH}" = "mips" ]] && echo -e "`eval echo $PARAM_VALS`" ;;
|
||||
SERVER) [[ "$GETPKG" = "y" ]] && echo -e "`eval echo $PARAM_VALS`" ;;
|
||||
RETRYSRCDOWNLOAD) [[ "$GETPKG" = "y" ]] && echo -e "`eval echo $PARAM_VALS`" ;;
|
||||
RETRYDOWNLOADCNT) [[ "$GETPKG" = "y" ]] && echo -e "`eval echo $PARAM_VALS`" ;;
|
||||
DOWNLOADTIMEOUT) [[ "$GETPKG" = "y" ]] && echo -e "`eval echo $PARAM_VALS`" ;;
|
||||
|
||||
# Envars that requires some validation
|
||||
LUSER) echo -e "`eval echo $PARAM_VALS`"
|
||||
|
|
Reference in a new issue