1.1 --- a/scripts/functions Sun Apr 13 18:25:30 2008 +0000
1.2 +++ b/scripts/functions Sun May 11 07:01:28 2008 +0000
1.3 @@ -283,22 +283,22 @@
1.4 # With automated download as we are doing, it can be very dangerous to use
1.5 # -c to continue the downloads. It's far better to simply overwrite the
1.6 # destination file
1.7 - # Some company networks have proxies to connect to to the internet, but
1.8 - # it's not easy to detect them, and wget may never timeout while connecting,
1.9 - # so force a global 120s timeout.
1.10 - wget -T 120 -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
1.11 - || wget -T 120 -nc --progress=dot:binary --tries=3 "$1" \
1.12 +    # Some company networks have firewalls through which they connect to the
1.13 +    # internet, but such firewalls are not easy to detect, and wget does not
1.14 +    # time out by default while connecting, so force a global ${CT_CONNECT_TIMEOUT}-second timeout.
1.15 + wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 --passive-ftp "$1" \
1.16 + || wget -T ${CT_CONNECT_TIMEOUT} -nc --progress=dot:binary --tries=3 "$1" \
1.17 || true
1.18 }
1.19
1.20 # Download an URL using curl
1.21 # Usage: CT_DoGetFileCurl <URL>
1.22 CT_DoGetFileCurl() {
1.23 - # Note: comments about wget method are also valid here
1.24 + # Note: comments about wget method (above) are also valid here
1.25 # Plus: no good progress indicator is available with curl,
1.26 # so output is consigned to oblivion
1.27 - curl --ftp-pasv -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
1.28 - || curl -O --retry 3 "$1" --connect-timeout 120 >/dev/null \
1.29 + curl --ftp-pasv -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \
1.30 + || curl -O --retry 3 "$1" --connect-timeout ${CT_CONNECT_TIMEOUT} >/dev/null \
1.31 || true
1.32 }
1.33