diff --git a/src/recur.c b/src/recur.c
index e4ffa424..1121e5a7 100644
--- a/src/recur.c
+++ b/src/recur.c
@@ -1,6 +1,7 @@
 /* Handling of recursive HTTP retrieving.
-   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
-   2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
+   2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation,
+   Inc.

 This file is part of GNU Wget.

@@ -33,9 +34,7 @@ as that of the covered work.  */
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#ifdef HAVE_UNISTD_H
-# include <unistd.h>
-#endif /* HAVE_UNISTD_H */
+#include <unistd.h>
 #include <errno.h>
 #include <assert.h>

@@ -111,7 +110,8 @@ url_enqueue (struct url_queue *queue, struct iri *i,
   if (queue->count > queue->maxcount)
     queue->maxcount = queue->count;

-  DEBUGP (("Enqueuing %s at depth %d\n", url, depth));
+  DEBUGP (("Enqueuing %s at depth %d\n",
+           quotearg_n_style (0, escape_quoting_style, url), depth));
   DEBUGP (("Queue count %d, maxcount %d.\n", queue->count, queue->maxcount));

   if (i)
@@ -152,7 +152,8 @@ url_dequeue (struct url_queue *queue, struct iri **i,

   --queue->count;

-  DEBUGP (("Dequeuing %s at depth %d\n", qel->url, qel->depth));
+  DEBUGP (("Dequeuing %s at depth %d\n",
+           quotearg_n_style (0, escape_quoting_style, qel->url), qel->depth));
   DEBUGP (("Queue count %d, maxcount %d.\n", queue->count, queue->maxcount));

   xfree (qel);
@@ -198,7 +199,6 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
      the queue, but haven't been downloaded yet.  */
   struct hash_table *blacklist;

-  int up_error_code;
   struct iri *i = iri_new ();

 #define COPYSTR(x)  (x) ? xstrdup(x) : NULL;
@@ -253,25 +253,22 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
          the second time.  */
       if (dl_url_file_map && hash_table_contains (dl_url_file_map, url))
         {
+          bool is_css_bool;
+
           file = xstrdup (hash_table_get (dl_url_file_map, url));

           DEBUGP (("Already downloaded \"%s\", reusing it from \"%s\".\n",
                    url, file));

-          /* this sucks, needs to be combined! */
-          if (html_allowed
-              && downloaded_html_set
-              && string_set_contains (downloaded_html_set, file))
+          if ((is_css_bool = (css_allowed
+                  && downloaded_css_set
+                  && string_set_contains (downloaded_css_set, file)))
+              || (html_allowed
+                  && downloaded_html_set
+                  && string_set_contains (downloaded_html_set, file)))
             {
               descend = true;
-              is_css = false;
-            }
-          if (css_allowed
-              && downloaded_css_set
-              && string_set_contains (downloaded_css_set, file))
-            {
-              descend = true;
-              is_css = true;
+              is_css = is_css_bool;
             }
         }
       else
@@ -281,7 +278,7 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
           struct url *url_parsed = url_parse (url, &url_err, i, true);

           status = retrieve_url (url_parsed, url, &file, &redirected, referer,
-                                 &dt, false, i);
+                                 &dt, false, i, true);

           if (html_allowed && file && status == RETROK
               && (dt & RETROKF) && (dt & TEXTHTML))
@@ -320,6 +317,11 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
               xfree (url);
               url = redirected;
             }
+          else
+            {
+              xfree (url);
+              url = xstrdup (url_parsed->url);
+            }
           url_free(url_parsed);
         }

@@ -503,15 +505,16 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
     }

   /* Several things to check for:
-     1. if scheme is not http, and we don't load it
-     2. check for relative links (if relative_only is set)
-     3. check for domain
-     4. check for no-parent
-     5. check for excludes && includes
-     6. check for suffix
-     7. check for same host (if spanhost is unset), with possible
+     1. if scheme is not https and https_only requested
+     2. if scheme is not http, and we don't load it
+     3. check for relative links (if relative_only is set)
+     4. check for domain
+     5. check for no-parent
+     6. check for excludes && includes
+     7. check for suffix
+     8. check for same host (if spanhost is unset), with possible
         gethostbyname baggage
-     8. check for robots.txt
+     9. check for robots.txt

      Addendum: If the URL is FTP, and it is to be loaded, only the
      domain and suffix settings are "stronger".
@@ -523,6 +526,14 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
      More time- and memory- consuming tests should be put later on
      the list.  */

+#ifdef HAVE_SSL
+  if (opt.https_only && u->scheme != SCHEME_HTTPS)
+    {
+      DEBUGP (("Not following non-HTTPS links.\n"));
+      goto out;
+    }
+#endif
+
   /* Determine whether URL under consideration has a HTTP-like scheme. */
   u_scheme_like_http = schemes_are_similar_p (u->scheme, SCHEME_HTTP);

@@ -558,7 +569,8 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
   if (opt.no_parent
       && schemes_are_similar_p (u->scheme, start_url_parsed->scheme)
       && 0 == strcasecmp (u->host, start_url_parsed->host)
-      && u->port == start_url_parsed->port
+      && (u->scheme != start_url_parsed->scheme
+          || u->port == start_url_parsed->port)
       && !(opt.page_requisites && upos->link_inline_p))
     {
       if (!subdir_p (start_url_parsed->dir, u->dir))
@@ -580,6 +592,11 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
           goto out;
         }
     }
+  if (!accept_url (url))
+    {
+      DEBUGP (("%s is excluded/not-included through regex.\n", url));
+      goto out;
+    }

   /* 6. Check for acceptance/rejection rules.  We ignore these rules
      for directories (no file name to match) and for non-leaf HTMLs,
@@ -630,7 +647,7 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
          files after downloading or we're just running a spider.  */
       if (opt.delete_after || opt.spider)
         {
-          logprintf (LOG_VERBOSE, "Removing %s.\n", rfile);
+          logprintf (LOG_VERBOSE, _("Removing %s.\n"), rfile);
           if (unlink (rfile))
             logprintf (LOG_NOTQUIET, "unlink: %s\n", strerror (errno));
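
Note on the HTTPS-only gate added to download_child_p above: it runs before the
cheaper-first/costlier-later chain of tests, so with --https-only every
non-HTTPS child link is discarded immediately. The following standalone sketch
paraphrases just that predicate outside of wget; the enum, the https_only flag,
and follow_scheme_p are illustrative stand-ins for wget's enum url_scheme,
opt.https_only, and the inlined check, not wget's actual API.

/* Minimal, compilable sketch of the scheme gate above.
   All names are hypothetical stand-ins, not wget's own symbols.  */
#include <stdio.h>
#include <stdbool.h>

enum scheme { SCHEME_HTTP, SCHEME_HTTPS, SCHEME_FTP };

static bool https_only = true;   /* stands in for opt.https_only */

/* Mirror of the early-exit test: with https_only set, reject any
   child link whose scheme is not HTTPS before costlier checks run.  */
static bool
follow_scheme_p (enum scheme s)
{
  if (https_only && s != SCHEME_HTTPS)
    {
      printf ("Not following non-HTTPS links.\n");
      return false;
    }
  return true;
}

int
main (void)
{
  printf ("https child: %s\n", follow_scheme_p (SCHEME_HTTPS) ? "follow" : "skip");
  printf ("http child:  %s\n", follow_scheme_p (SCHEME_HTTP) ? "follow" : "skip");
  return 0;
}

Placing the check first matches the comment in the hunk: scheme comparison is a
plain integer test, so it belongs ahead of the host, regex, and robots.txt
checks, which may allocate or touch the network.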