# Check for ? in the url, this seems to indicate that there may be
# some cgi redirection involved which means continued downloading would
- # not work (bug 8993).
+ # not work (bug 8993).
# The sourceforge check pre-dates that but is lacking any documentation,
# I suspect it is a less general attempt to solve the same problem.
# (afk 2005-06-25)
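The check described above amounts to something like the following (a minimal sketch; only the CONTINUE variable is taken from the code below, the rest is illustrative):

    # Disable wget's resume flag when the url carries a query string,
    # since cgi-generated responses rarely support byte-range resume.
    case "$URL" in
      *\?*) CONTINUE=""   ;;  # query string: restart from scratch on retry
      *)    CONTINUE="-c" ;;  # plain file url: let wget resume partial downloads
    esac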
@@ -113,13 +113,13 @@ function dl_wget_set_options() {
else
URL_HTTP_RETRIES="-t 3"
fi
-
+
ONLY_NEWER=""
DEREF_SYM="--retr-symlinks"
WGET_OPTIONS="$URL_HTTP_TIMEOUT $URL_HTTP_RETRIES $NO_CACHE $RATE $PASSIVE
$CONTINUE $ONLY_NEWER $DEREF_SYM"
debug 'dl_wget' "wget options: $WGET_OPTIONS"
-}
+}
#---------------------------------------------------------------------
##=back
##
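For reference, an option string assembled the way dl_wget_set_options does above is typically expanded unquoted, so word-splitting turns it back into separate wget arguments; a sketch (the -O target is illustrative, not taken from this patch):

    # $WGET_OPTIONS must stay unquoted here on purpose.
    wget $WGET_OPTIONS -O "$TARGET_FILE" "$URL"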
diff --git a/var/lib/sorcery/modules/liburl b/var/lib/sorcery/modules/liburl
index 3d26356..67f471c 100755
--- a/var/lib/sorcery/modules/liburl
+++ b/var/lib/sorcery/modules/liburl
@@ -80,7 +80,7 @@
#---------------------------------------------------------------------
# Load library files (url_*) that contain url handlers
#
-# (2002/09/29) added if so it onlt loads the stuff once
+# (2002/09/29) added if so it only loads the stuff once
#---------------------------------------------------------------------
if ! [[ $URL_HANDLER_FILES ]] ; then
URL_HANDLER_FILES=`ls $SGL_LIBRARY_MODULES/url_handlers/url_*[^~]`
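The guard keeps the ls from running on every include; the load step that presumably follows would look roughly like this (a sketch, assuming the url_* handlers are plain bash files meant to be sourced):

    # Source each handler once; URL_HANDLER_FILES stays set afterwards,
    # so a second include of liburl skips the whole block.
    for handler in $URL_HANDLER_FILES; do
      . "$handler"
    done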
diff --git a/var/lib/sorcery/modules/url_handlers/url_http b/var/lib/sorcery/modules/url_handlers/url_http
index 3543ed8..5f2d5ac 100755
--- a/var/lib/sorcery/modules/url_handlers/url_http
+++ b/var/lib/sorcery/modules/url_handlers/url_http
@@ -2,14 +2,14 @@
#---------------------------------------------------------------------
##
##=head1 SYNOPSIS
-##
+##
## Url handler functions for downloading http, https, and ftp urls
##
##=head1 DESCRIPTION
##
-## This file contains functions for downloading and verifying
+## This file contains functions for downloading and verifying
## http, https, and ftp urls. This file uses the "wget" program.
-##
+##
##=head1 COPYRIGHT
##
## Copyright 2002 by the Source Mage Team
@@ -38,7 +38,7 @@ function url_https_bucketize() {
#---------------------------------------------------------------------
##=item url_http_verify <url>
-##
+##
## Verifies the specified http url. Returns true if the url exists
## OR if the url is an empty string.
##
@@ -53,13 +53,13 @@ function url_http_verify() {
echo $OUTPUT
false
fi
- fi
+ fi
}
#---------------------------------------------------------------------
##=item url_https_verify <url>
-##
+##
## Verifies the specified https url. Returns true if the url exists
## OR if the url is an empty string.
##
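A spider-style check along the lines of the verify functions above could be sketched as follows (illustrative only; --spider and --quiet are standard wget flags, but the exact options sorcery passes are not shown in these hunks):

    # An empty url is treated as trivially valid, matching the
    # documented behaviour; otherwise wget checks existence without
    # downloading the body.
    url_http_verify_sketch() {
      local URL=$1
      [[ -z $URL ]] && return 0
      wget --spider --quiet "$URL"
    }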
@@ -71,16 +71,16 @@ function url_https_verify() {
#---------------------------------------------------------------------
##=item url_ftp_verify <url>
-##
+##
## Verifies the specified ftp url. Echoes results of wget if file
## is not found.
##
#
# Implementation note: wget --spider still downloads ftp files in
-# full rather than just checking that the file is there. To get
+# full rather than just checking that the file is there. To get
# around this problem, we download the directory and see if the file
# is in the directory listing.
-#
+#
#---------------------------------------------------------------------
function url_ftp_verify() {
local URL=$1
@@ -98,7 +98,7 @@ function url_ftp_verify() {
rm -f .listing
false
fi
- fi
+ fi
}
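The directory-listing workaround from the implementation note could be sketched like this (illustrative; .listing is wget's raw ftp listing file, kept here by the standard --no-remove-listing flag, and is the same file the real function cleans up above):

    # Fetch only the directory listing, then grep it for the filename.
    url_ftp_verify_sketch() {
      local URL=$1
      local FILE=${URL##*/}
      local DIR=${URL%/*}/
      wget --quiet --no-remove-listing "$DIR"
      if grep -q "$FILE" .listing; then
        rm -f .listing
        true
      else
        rm -f .listing
        false
      fi
    }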