X-Git-Url: http://sjero.net/git/?a=blobdiff_plain;f=src%2Fretr.c;h=f081d83160e332275ad3231b6cdc2ab9cc5fd905;hb=766df9d4e9392045a4e5c730ed81e599b509557a;hp=3234286baa8ea48c16a97d62ec52a49561ccc821;hpb=53d0aff795316dc1a4b785632f0d4d93c861e9cb;p=wget
diff --git a/src/retr.c b/src/retr.c
index 3234286b..f081d831 100644
--- a/src/retr.c
+++ b/src/retr.c
@@ -1,6 +1,6 @@
/* File retrieval.
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
- 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
+ 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
This file is part of GNU Wget.
@@ -17,17 +17,18 @@ GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Wget.  If not, see <http://www.gnu.org/licenses/>.
-In addition, as a special exception, the Free Software Foundation
-gives permission to link the code of its release of Wget with the
-OpenSSL project's "OpenSSL" library (or with modified versions of it
-that use the same license as the "OpenSSL" library), and distribute
-the linked executables. You must obey the GNU General Public License
-in all respects for all of the code used other than "OpenSSL". If you
-modify this file, you may extend this exception to your version of the
-file, but you are not obligated to do so. If you do not wish to do
-so, delete this exception statement from your version. */
+Additional permission under GNU GPL version 3 section 7
-#include <config.h>
+If you modify this program, or any covered work, by linking or
+combining it with the OpenSSL project's OpenSSL library (or a
+modified version of that library), containing parts covered by the
+terms of the OpenSSL or SSLeay licenses, the Free Software Foundation
+grants you additional permission to convey the resulting work.
+Corresponding Source for a non-source form of such a combination
+shall include the source code for the parts of OpenSSL used as well
+as that of the covered work. */
+
+#include "wget.h"
#include <stdio.h>
#include <stdlib.h>
@@ -38,7 +39,6 @@ so, delete this exception statement from your version. */
#include <string.h>
#include <assert.h>
-#include "wget.h"
#include "utils.h"
#include "retr.h"
#include "progress.h"
@@ -51,6 +51,7 @@ so, delete this exception statement from your version. */
#include "hash.h"
#include "convert.h"
#include "ptimer.h"
+#include "html-url.h"
/* Total size of downloaded files. Used to enforce quota. */
SUM_SIZE_INT total_downloaded_bytes;
@@ -392,7 +393,7 @@ fd_read_hunk (int fd, hunk_terminator_t terminator, long sizehint, long maxsize)
char *hunk = xmalloc (bufsize);
int tail = 0; /* tail position in HUNK */
- assert (maxsize >= bufsize);
+ assert (!maxsize || maxsize >= bufsize);
while (1)
{
@@ -627,8 +628,10 @@ retrieve_url (const char *origurl, char **file, char **newloc,
u = url_parse (url, &up_error_code);
if (!u)
{
- logprintf (LOG_NOTQUIET, "%s: %s.\n", url, url_error (up_error_code));
+ char *error = url_error (url, up_error_code);
+ logprintf (LOG_NOTQUIET, "%s: %s.\n", url, error);
xfree (url);
+ xfree (error);
return URLERROR;
}
@@ -649,9 +652,11 @@ retrieve_url (const char *origurl, char **file, char **newloc,
proxy_url = url_parse (proxy, &up_error_code);
if (!proxy_url)
{
+ char *error = url_error (proxy, up_error_code);
logprintf (LOG_NOTQUIET, _("Error parsing proxy URL %s: %s.\n"),
- proxy, url_error (up_error_code));
+ proxy, error);
xfree (url);
+ xfree (error);
RESTORE_POST_DATA;
return PROXERR;
}
@@ -725,11 +730,13 @@ retrieve_url (const char *origurl, char **file, char **newloc,
newloc_parsed = url_parse (mynewloc, &up_error_code);
if (!newloc_parsed)
{
+ char *error = url_error (mynewloc, up_error_code);
logprintf (LOG_NOTQUIET, "%s: %s.\n", escnonprint_uri (mynewloc),
- url_error (up_error_code));
+ error);
url_free (u);
xfree (url);
xfree (mynewloc);
+ xfree (error);
RESTORE_POST_DATA;
return result;
}
@@ -778,6 +785,8 @@ retrieve_url (const char *origurl, char **file, char **newloc,
register_redirection (origurl, u->url);
if (*dt & TEXTHTML)
register_html (u->url, local_file);
+ if (*dt & TEXTCSS)
+ register_css (u->url, local_file);
}
}
@@ -819,10 +828,32 @@ retrieve_from_file (const char *file, bool html, int *count)
uerr_t status;
struct urlpos *url_list, *cur_url;
- url_list = (html ? get_urls_html (file, NULL, NULL)
- : get_urls_file (file));
+ char *input_file = NULL;
+ const char *url = file;
+
status = RETROK; /* Suppose everything is OK. */
*count = 0; /* Reset the URL count. */
+
+ if (url_has_scheme (url))
+ {
+ int dt;
+ uerr_t status;
+
+ if (!opt.base_href)
+ opt.base_href = xstrdup (url);
+
+ status = retrieve_url (url, &input_file, NULL, NULL, &dt, false);
+ if (status != RETROK)
+ return status;
+
+ if (dt & TEXTHTML)
+ html = true;
+ }
+ else
+ input_file = (char *) file;
+
+ url_list = (html ? get_urls_html (input_file, NULL, NULL)
+ : get_urls_file (input_file));
for (cur_url = url_list; cur_url; cur_url = cur_url->next, ++*count)
{
@@ -1039,3 +1070,16 @@ no_proxy_match (const char *host, const char **no_proxy)
else
return sufmatch (no_proxy, host);
}
+
+/* Set the file parameter to point to the local file string. */
+void
+set_local_file (const char **file, const char *default_file)
+{
+ if (opt.output_document)
+ {
+ if (output_stream_regular)
+ *file = opt.output_document;
+ }
+ else
+ *file = default_file;
+}