X-Git-Url: http://sjero.net/git/?p=wget;a=blobdiff_plain;f=src%2Frecur.c;h=1121e5a721a1df618097e916fae652106e774237;hp=98e7dc495ec420e479a08354d6f7ec49ce338fd6;hb=HEAD;hpb=857224758e60c6b206015ea4b9b30c79332a5b4c

diff --git a/src/recur.c b/src/recur.c
index 98e7dc49..1121e5a7 100644
--- a/src/recur.c
+++ b/src/recur.c
@@ -1,6 +1,7 @@
 /* Handling of recursive HTTP retrieving.
-   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
-   2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
+   2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 Free Software Foundation,
+   Inc.

 This file is part of GNU Wget.

@@ -33,9 +34,7 @@ as that of the covered work.  */
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#ifdef HAVE_UNISTD_H
-# include <unistd.h>
-#endif /* HAVE_UNISTD_H */
+#include <unistd.h>
 #include <errno.h>
 #include <assert.h>

@@ -51,7 +50,7 @@ as that of the covered work.  */
 #include "html-url.h"
 #include "css-url.h"
 #include "spider.h"
-
+
 /* Functions for maintaining the URL queue.  */

 struct queue_element {
@@ -60,6 +59,7 @@ struct queue_element {
   int depth;                    /* the depth */
   bool html_allowed;            /* whether the document is allowed to be
                                    treated as HTML. */
+  struct iri *iri;              /* sXXXav */
   bool css_allowed;             /* whether the document is allowed to be
                                    treated as CSS. */
   struct queue_element *next;   /* next element in queue */
@@ -93,11 +93,12 @@ url_queue_delete (struct url_queue *queue)
    into it.  */

 static void
-url_enqueue (struct url_queue *queue,
+url_enqueue (struct url_queue *queue, struct iri *i,
              const char *url, const char *referer, int depth,
              bool html_allowed, bool css_allowed)
 {
   struct queue_element *qel = xnew (struct queue_element);
+  qel->iri = i;
   qel->url = url;
   qel->referer = referer;
   qel->depth = depth;
@@ -109,9 +110,14 @@
   if (queue->count > queue->maxcount)
     queue->maxcount = queue->count;

-  DEBUGP (("Enqueuing %s at depth %d\n", url, depth));
+  DEBUGP (("Enqueuing %s at depth %d\n",
+           quotearg_n_style (0, escape_quoting_style, url), depth));
   DEBUGP (("Queue count %d, maxcount %d.\n", queue->count, queue->maxcount));

+  if (i)
+    DEBUGP (("[IRI Enqueuing %s with %s\n", quote_n (0, url),
+             i->uri_encoding ? quote_n (1, i->uri_encoding) : "None"));
+
   if (queue->tail)
     queue->tail->next = qel;
   queue->tail = qel;
@@ -124,7 +130,7 @@ url_enqueue (struct url_queue *queue,
    succeeded, or false if the queue is empty.  */

 static bool
-url_dequeue (struct url_queue *queue,
+url_dequeue (struct url_queue *queue, struct iri **i,
              const char **url, const char **referer, int *depth,
              bool *html_allowed, bool *css_allowed)
 {
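
The queue these hunks extend is a plain singly linked FIFO: url_enqueue appends at the tail, url_dequeue pops from the head, so URLs leave the queue in discovery order and the crawl proceeds breadth-first. A stand-alone sketch of the same discipline follows; the struct layout and names are simplified for illustration and are not Wget's code verbatim:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

struct qel { char *url; int depth; struct qel *next; };
struct queue { struct qel *head, *tail; int count; };

/* Append at the tail; the tail pointer keeps this O(1). */
static void
enqueue (struct queue *q, const char *url, int depth)
{
  struct qel *e = malloc (sizeof *e);
  e->url = strdup (url);
  e->depth = depth;
  e->next = NULL;
  if (q->tail)
    q->tail->next = e;
  else
    q->head = e;
  q->tail = e;
  q->count++;
}

/* Pop from the head; returns 0 when the queue is empty. */
static int
dequeue (struct queue *q, char **url, int *depth)
{
  struct qel *e = q->head;
  if (!e)
    return 0;
  q->head = e->next;
  if (!q->head)
    q->tail = NULL;
  *url = e->url;
  *depth = e->depth;
  q->count--;
  free (e);
  return 1;
}

int
main (void)
{
  struct queue q = { NULL, NULL, 0 };
  char *url;
  int depth;

  enqueue (&q, "http://example.com/", 0);
  enqueue (&q, "http://example.com/a.html", 1);
  while (dequeue (&q, &url, &depth))   /* prints in discovery order */
    {
      printf ("%s at depth %d\n", url, depth);
      free (url);
    }
  return 0;
}

Dequeuing from the head while children are appended at the tail is exactly what makes the traversal breadth-first rather than depth-first.
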
@@ -137,6 +143,7 @@ url_dequeue (struct url_queue *queue,
   if (!queue->head)
     queue->tail = NULL;

+  *i = qel->iri;
   *url = qel->url;
   *referer = qel->referer;
   *depth = qel->depth;
@@ -145,7 +152,8 @@ url_dequeue (struct url_queue *queue,
   --queue->count;

-  DEBUGP (("Dequeuing %s at depth %d\n", qel->url, qel->depth));
+  DEBUGP (("Dequeuing %s at depth %d\n",
+           quotearg_n_style (0, escape_quoting_style, qel->url), qel->depth));
   DEBUGP (("Queue count %d, maxcount %d.\n", queue->count, queue->maxcount));

   xfree (qel);

@@ -153,9 +161,9 @@

 static bool download_child_p (const struct urlpos *, struct url *, int,
-                              struct url *, struct hash_table *);
+                              struct url *, struct hash_table *, struct iri *);
 static bool descend_redirect_p (const char *, struct url *, int,
-                                struct url *, struct hash_table *);
+                                struct url *, struct hash_table *, struct iri *);


 /* Retrieve a part of the web beginning with START_URL.  This used to
@@ -180,7 +188,7 @@ static bool descend_redirect_p (const char *, struct url *, int,
    options, add it to the queue. */

 uerr_t
-retrieve_tree (const char *start_url)
+retrieve_tree (struct url *start_url_parsed, struct iri *pi)
 {
   uerr_t status = RETROK;

@@ -191,23 +199,27 @@ retrieve_tree (const char *start_url)
      the queue, but haven't been downloaded yet.  */
   struct hash_table *blacklist;

-  int up_error_code;
-  struct url *start_url_parsed = url_parse (start_url, &up_error_code);
+  struct iri *i = iri_new ();

-  if (!start_url_parsed)
+#define COPYSTR(x) (x) ? xstrdup(x) : NULL;
+  /* Duplicate pi struct if not NULL */
+  if (pi)
     {
-      char *error = url_error (start_url, up_error_code);
-      logprintf (LOG_NOTQUIET, "%s: %s.\n", start_url, error);
-      xfree (error);
-      return URLERROR;
+      i->uri_encoding = COPYSTR (pi->uri_encoding);
+      i->content_encoding = COPYSTR (pi->content_encoding);
+      i->utf8_encode = pi->utf8_encode;
     }
+  else
+    set_uri_encoding (i, opt.locale, true);
+#undef COPYSTR

   queue = url_queue_new ();
   blacklist = make_string_hash_table (0);

   /* Enqueue the starting URL.  Use start_url_parsed->url rather than
      just URL so we enqueue the canonical form of the URL.  */
-  url_enqueue (queue, xstrdup (start_url_parsed->url), NULL, 0, true, false);
+  url_enqueue (queue, i, xstrdup (start_url_parsed->url), NULL, 0, true,
+               false);
   string_set_add (blacklist, start_url_parsed->url);

   while (1)
@@ -226,7 +238,7 @@

       /* Get the next URL from the queue... */

-      if (!url_dequeue (queue,
+      if (!url_dequeue (queue, (struct iri **) &i,
                         (const char **)&url, (const char **)&referer,
                         &depth, &html_allowed, &css_allowed))
         break;
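
From here on retrieve_tree is a worklist loop: it dequeues one URL together with its IRI context, fetches it, and enqueues acceptable children one level deeper, while the blacklist hash table guarantees each URL is queued at most once. A toy model of that control flow, with small integers standing in for URLs and a fixed link table in place of real HTML parsing (illustrative only):

#include <stdio.h>

/* Toy link graph: page i links to the pages listed in links[i],
   terminated by -1. */
#define NPAGES 5
static const int links[NPAGES][NPAGES] =
  { {1, 2, -1}, {3, -1}, {3, 4, -1}, {-1}, {-1} };

int
main (void)
{
  int queue[NPAGES], head = 0, tail = 0;
  int depth[NPAGES] = {0};
  int seen[NPAGES] = {0};      /* plays the role of the blacklist */

  queue[tail++] = 0;           /* enqueue the start URL */
  seen[0] = 1;

  while (head < tail)          /* loop until the queue drains */
    {
      int cur = queue[head++];
      printf ("fetch page %d at depth %d\n", cur, depth[cur]);
      for (int j = 0; links[cur][j] >= 0; j++)
        {
          int child = links[cur][j];
          if (!seen[child])    /* each URL is queued at most once */
            {
              seen[child] = 1;
              depth[child] = depth[cur] + 1;
              queue[tail++] = child;
            }
        }
    }
  return 0;
}

Marking a page as seen at enqueue time, not at fetch time, is what prevents the same URL from entering the queue twice via two different parents.
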
@@ -241,44 +253,32 @@
          the second time.  */
       if (dl_url_file_map && hash_table_contains (dl_url_file_map, url))
         {
+          bool is_css_bool;
+
           file = xstrdup (hash_table_get (dl_url_file_map, url));

           DEBUGP (("Already downloaded \"%s\", reusing it from \"%s\".\n",
                    url, file));

-          /* this sucks, needs to be combined! */
-          if (html_allowed
-              && downloaded_html_set
-              && string_set_contains (downloaded_html_set, file))
-            {
-              descend = true;
-              is_css = false;
-            }
-          if (css_allowed
-              && downloaded_css_set
-              && string_set_contains (downloaded_css_set, file))
+          if ((is_css_bool = (css_allowed
+                              && downloaded_css_set
+                              && string_set_contains (downloaded_css_set, file)))
+              || (html_allowed
+                  && downloaded_html_set
+                  && string_set_contains (downloaded_html_set, file)))
             {
               descend = true;
-              is_css = true;
+              is_css = is_css_bool;
             }
         }
       else
         {
           int dt = 0, url_err;
           char *redirected = NULL;
-          struct url *url_parsed = url_parse (url, &url_err);
+          struct url *url_parsed = url_parse (url, &url_err, i, true);

-          if (!url_parsed)
-            {
-              char *error = url_error (url, url_err);
-              logprintf (LOG_NOTQUIET, "%s: %s.\n", url, error);
-              xfree (error);
-              status = URLERROR;
-            }
-          else
-            {
-              status = retrieve_url (url, &file, &redirected, referer, &dt, false);
-            }
+          status = retrieve_url (url_parsed, url, &file, &redirected, referer,
+                                 &dt, false, i, true);

           if (html_allowed && file && status == RETROK
               && (dt & RETROKF) && (dt & TEXTHTML))
@@ -306,7 +306,7 @@
           if (descend)
             {
               if (!descend_redirect_p (redirected, url_parsed, depth,
-                                       start_url_parsed, blacklist))
+                                       start_url_parsed, blacklist, i))
                 descend = false;
               else
                 /* Make sure that the old pre-redirect form gets
@@ -317,6 +317,11 @@
                   xfree (url);
                   url = redirected;
                 }
+              else
+                {
+                  xfree (url);
+                  url = xstrdup (url_parsed->url);
+                }
               url_free(url_parsed);
             }

@@ -359,7 +364,7 @@
               bool meta_disallow_follow = false;
               struct urlpos *children
                 = is_css ? get_urls_css_file (file, url) :
-                           get_urls_html (file, url, &meta_disallow_follow);
+                           get_urls_html (file, url, &meta_disallow_follow, i);

               if (opt.use_robots && meta_disallow_follow)
                 {
@@ -370,7 +375,8 @@
           if (children)
             {
               struct urlpos *child = children;
-              struct url *url_parsed = url_parsed = url_parse (url, NULL);
+              struct url *url_parsed = url_parse (url, NULL, i, true);
+              struct iri *ci;
               char *referer_url = url;
               bool strip_auth = (url_parsed != NULL
                                  && url_parsed->user != NULL);
@@ -387,9 +393,11 @@
                   if (dash_p_leaf_HTML && !child->link_inline_p)
                     continue;
                   if (download_child_p (child, url_parsed, depth, start_url_parsed,
-                                        blacklist))
+                                        blacklist, i))
                     {
-                      url_enqueue (queue, xstrdup (child->url->url),
+                      ci = iri_new ();
+                      set_uri_encoding (ci, i->content_encoding, false);
+                      url_enqueue (queue, ci, xstrdup (child->url->url),
                                    xstrdup (referer_url), depth + 1,
                                    child->link_expect_html,
                                    child->link_expect_css);
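
Note the encoding hand-off in the hunk above: every accepted child gets a fresh iri via iri_new (), and its URI encoding is seeded from the parent's content encoding, because a link has to be decoded in the charset of the page that contains it. A reduced model of that inheritance rule follows; the real struct iri in src/iri.h carries more state, and the real set_uri_encoding also takes a flag controlling whether an existing value may be overridden:

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Reduced model of Wget's IRI context. */
struct iri
{
  char *uri_encoding;      /* charset the URL string itself is in */
  char *content_encoding;  /* charset of the fetched document */
};

static struct iri *
iri_new (void)
{
  return calloc (1, sizeof (struct iri));
}

/* Simplified set_uri_encoding: a child link is decoded in the
   charset of the page that contains it. */
static void
set_uri_encoding (struct iri *i, const char *charset)
{
  free (i->uri_encoding);
  i->uri_encoding = charset ? strdup (charset) : NULL;
}

int
main (void)
{
  struct iri *parent = iri_new ();
  parent->content_encoding = strdup ("ISO-8859-2"); /* e.g. from headers */

  struct iri *child = iri_new ();
  set_uri_encoding (child, parent->content_encoding);

  printf ("child links decoded as %s\n", child->uri_encoding);

  free (child->uri_encoding);
  free (child);
  free (parent->content_encoding);
  free (parent);
  return 0;
}
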
"--spider" : "recursive rejection criteria"))); logprintf (LOG_VERBOSE, (opt.delete_after || opt.spider @@ -434,6 +442,7 @@ retrieve_tree (const char *start_url) xfree (url); xfree_null (referer); xfree_null (file); + iri_free (i); } /* If anything is left of the queue due to a premature exit, free it @@ -442,17 +451,17 @@ retrieve_tree (const char *start_url) char *d1, *d2; int d3; bool d4, d5; - while (url_dequeue (queue, + struct iri *d6; + while (url_dequeue (queue, (struct iri **)&d6, (const char **)&d1, (const char **)&d2, &d3, &d4, &d5)) { + iri_free (d6); xfree (d1); xfree_null (d2); } } url_queue_delete (queue); - if (start_url_parsed) - url_free (start_url_parsed); string_set_free (blacklist); if (opt.quota && total_downloaded_bytes > opt.quota) @@ -473,7 +482,8 @@ retrieve_tree (const char *start_url) static bool download_child_p (const struct urlpos *upos, struct url *parent, int depth, - struct url *start_url_parsed, struct hash_table *blacklist) + struct url *start_url_parsed, struct hash_table *blacklist, + struct iri *iri) { struct url *u = upos->url; const char *url = u->url; @@ -483,7 +493,7 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth, if (string_set_contains (blacklist, url)) { - if (opt.spider) + if (opt.spider) { char *referrer = url_string (parent, URL_AUTH_HIDE_PASSWD); DEBUGP (("download_child_p: parent->url is: %s\n", quote (parent->url))); @@ -495,15 +505,16 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth, } /* Several things to check for: - 1. if scheme is not http, and we don't load it - 2. check for relative links (if relative_only is set) - 3. check for domain - 4. check for no-parent - 5. check for excludes && includes - 6. check for suffix - 7. check for same host (if spanhost is unset), with possible + 1. if scheme is not https and https_only requested + 2. if scheme is not http, and we don't load it + 3. check for relative links (if relative_only is set) + 4. check for domain + 5. check for no-parent + 6. check for excludes && includes + 7. check for suffix + 8. check for same host (if spanhost is unset), with possible gethostbyname baggage - 8. check for robots.txt + 9. check for robots.txt Addendum: If the URL is FTP, and it is to be loaded, only the domain and suffix settings are "stronger". @@ -515,6 +526,14 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth, More time- and memory- consuming tests should be put later on the list. */ +#ifdef HAVE_SSL + if (opt.https_only && u->scheme != SCHEME_HTTPS) + { + DEBUGP (("Not following non-HTTPS links.\n")); + goto out; + } +#endif + /* Determine whether URL under consideration has a HTTP-like scheme. */ u_scheme_like_http = schemes_are_similar_p (u->scheme, SCHEME_HTTP); @@ -550,7 +569,8 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth, if (opt.no_parent && schemes_are_similar_p (u->scheme, start_url_parsed->scheme) && 0 == strcasecmp (u->host, start_url_parsed->host) - && u->port == start_url_parsed->port + && (u->scheme != start_url_parsed->scheme + || u->port == start_url_parsed->port) && !(opt.page_requisites && upos->link_inline_p)) { if (!subdir_p (start_url_parsed->dir, u->dir)) @@ -572,6 +592,11 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth, goto out; } } + if (!accept_url (url)) + { + DEBUGP (("%s is excluded/not-included through regex.\n", url)); + goto out; + } /* 6. Check for acceptance/rejection rules. 
@@ -614,7 +639,7 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
       if (!specs)
         {
           char *rfile;
-          if (res_retrieve_file (url, &rfile))
+          if (res_retrieve_file (url, &rfile, iri))
             {
               specs = res_parse_from_file (rfile);

@@ -622,7 +647,7 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
                  files after downloading or we're just running a spider.  */
               if (opt.delete_after || opt.spider)
                 {
-                  logprintf (LOG_VERBOSE, "Removing %s.\n", rfile);
+                  logprintf (LOG_VERBOSE, _("Removing %s.\n"), rfile);
                   if (unlink (rfile))
                     logprintf (LOG_NOTQUIET, "unlink: %s\n",
                                strerror (errno));

@@ -669,7 +694,8 @@

 static bool
 descend_redirect_p (const char *redirected, struct url *orig_parsed, int depth,
-                    struct url *start_url_parsed, struct hash_table *blacklist)
+                    struct url *start_url_parsed, struct hash_table *blacklist,
+                    struct iri *iri)
 {
   struct url *new_parsed;
   struct urlpos *upos;
@@ -677,14 +703,14 @@ descend_redirect_p (const char *redirected, struct url *orig_parsed, int depth,

   assert (orig_parsed != NULL);

-  new_parsed = url_parse (redirected, NULL);
+  new_parsed = url_parse (redirected, NULL, NULL, false);
   assert (new_parsed != NULL);

   upos = xnew0 (struct urlpos);
   upos->url = new_parsed;

   success = download_child_p (upos, orig_parsed, depth,
-                              start_url_parsed, blacklist);
+                              start_url_parsed, blacklist, iri);

   url_free (new_parsed);
   xfree (upos);
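
descend_redirect_p closes a loophole: when a fetch is redirected, the redirect target is parsed and pushed through the same download_child_p filters, relative to the original parent, so a redirect cannot pull the crawl outside the allowed set. A toy illustration of that re-validation idea, where a naive single-host prefix test stands in for the full rule set (hypothetical helpers, not Wget's code):

#include <stdbool.h>
#include <stdio.h>
#include <string.h>

/* Toy filter: accept only URLs on the start host.  The prefix match is
   deliberately naive; it only stands in for download_child_p. */
static bool
child_ok_p (const char *url, const char *start_host)
{
  const char *host = strstr (url, "://");
  if (!host)
    return false;
  host += 3;
  return strncmp (host, start_host, strlen (start_host)) == 0;
}

/* Mirror of descend_redirect_p: a redirect target must pass the same
   test as an ordinary child link before the crawl may descend. */
static bool
redirect_ok_p (const char *redirected, const char *start_host)
{
  return child_ok_p (redirected, start_host);
}

int
main (void)
{
  printf ("%d\n", redirect_ok_p ("http://example.com/new", "example.com"));
  printf ("%d\n", redirect_ok_p ("http://evil.example.net/x", "example.com"));
  return 0;
}
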