X-Git-Url: http://sjero.net/git/?p=wget;a=blobdiff_plain;f=src%2Frecur.c;h=00e7603163c1b2be1998c0f7dc74339af35253fc;hp=71fbe7bf1f73767b47b105e44042cb6e81cb8c88;hb=2f6aa1d7417df1dfc58597777686fbd77179b9fd;hpb=84395897ad2d1c107be470946daba744b2e7ebe8

diff --git a/src/recur.c b/src/recur.c
index 71fbe7bf..00e76031 100644
--- a/src/recur.c
+++ b/src/recur.c
@@ -1,6 +1,7 @@
 /* Handling of recursive HTTP retrieving.
-   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
-   2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
+   2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation,
+   Inc.
 
 This file is part of GNU Wget.
 
@@ -33,9 +34,7 @@ as that of the covered work. */
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#ifdef HAVE_UNISTD_H
-# include <unistd.h>
-#endif /* HAVE_UNISTD_H */
+#include <unistd.h>
 #include <errno.h>
 #include <assert.h>
 
@@ -111,12 +110,13 @@ url_enqueue (struct url_queue *queue, struct iri *i,
   if (queue->count > queue->maxcount)
     queue->maxcount = queue->count;
 
-  DEBUGP (("Enqueuing %s at depth %d\n", url, depth));
+  DEBUGP (("Enqueuing %s at depth %d\n",
+           quotearg_n_style (0, escape_quoting_style, url), depth));
   DEBUGP (("Queue count %d, maxcount %d.\n", queue->count, queue->maxcount));
 
   if (i)
-    DEBUGP (("[IRI Enqueuing %s with %s\n", quote (url),
-             i->uri_encoding ? quote (i->uri_encoding) : "None"));
+    DEBUGP (("[IRI Enqueuing %s with %s\n", quote_n (0, url),
+             i->uri_encoding ? quote_n (1, i->uri_encoding) : "None"));
 
   if (queue->tail)
     queue->tail->next = qel;
@@ -152,7 +152,8 @@ url_dequeue (struct url_queue *queue, struct iri **i,
 
   --queue->count;
 
-  DEBUGP (("Dequeuing %s at depth %d\n", qel->url, qel->depth));
+  DEBUGP (("Dequeuing %s at depth %d\n",
+           quotearg_n_style (0, escape_quoting_style, qel->url), qel->depth));
   DEBUGP (("Queue count %d, maxcount %d.\n", queue->count, queue->maxcount));
 
   xfree (qel);
@@ -161,7 +162,7 @@
 
 static bool download_child_p (const struct urlpos *, struct url *, int,
                               struct url *, struct hash_table *, struct iri *);
-static bool descend_redirect_p (const char *, const char *, int,
+static bool descend_redirect_p (const char *, struct url *, int,
                                 struct url *, struct hash_table *, struct iri *);
 
 
@@ -187,7 +188,7 @@ static bool descend_redirect_p (const char *, const char *, int,
    options, add it to the queue. */
 
 uerr_t
-retrieve_tree (const char *start_url)
+retrieve_tree (struct url *start_url_parsed, struct iri *pi)
 {
   uerr_t status = RETROK;
 
@@ -198,18 +199,19 @@ retrieve_tree (const char *start_url)
      the queue, but haven't been downloaded yet. */
   struct hash_table *blacklist;
 
-  int up_error_code;
-  struct url *start_url_parsed;
   struct iri *i = iri_new ();
-  set_uri_encoding (i, opt.locale, true);
 
-  start_url_parsed = url_parse (start_url, &up_error_code, i);
-  if (!start_url_parsed)
+#define COPYSTR(x) (x) ? xstrdup(x) : NULL;
+  /* Duplicate pi struct if not NULL */
+  if (pi)
     {
-      logprintf (LOG_NOTQUIET, "%s: %s.\n", start_url,
-                 url_error (up_error_code));
-      return URLERROR;
+      i->uri_encoding = COPYSTR (pi->uri_encoding);
+      i->content_encoding = COPYSTR (pi->content_encoding);
+      i->utf8_encode = pi->utf8_encode;
     }
+  else
+    set_uri_encoding (i, opt.locale, true);
+#undef COPYSTR
 
   queue = url_queue_new ();
   blacklist = make_string_hash_table (0);
@@ -274,11 +276,12 @@ retrieve_tree (const char *start_url)
         }
       else
         {
-          int dt = 0;
+          int dt = 0, url_err;
           char *redirected = NULL;
+          struct url *url_parsed = url_parse (url, &url_err, i, true);
 
-          status = retrieve_url (url, &file, &redirected, referer, &dt,
-                                 false, i);
+          status = retrieve_url (url_parsed, url, &file, &redirected, referer,
+                                 &dt, false, i, true);
 
           if (html_allowed && file && status == RETROK
               && (dt & RETROKF) && (dt & TEXTHTML))
@@ -305,7 +308,7 @@ retrieve_tree (const char *start_url)
              want to follow it. */
           if (descend)
             {
-              if (!descend_redirect_p (redirected, url, depth,
+              if (!descend_redirect_p (redirected, url_parsed, depth,
                                        start_url_parsed, blacklist, i))
                 descend = false;
               else
@@ -317,6 +320,12 @@ retrieve_tree (const char *start_url)
               xfree (url);
               url = redirected;
             }
+          else
+            {
+              xfree (url);
+              url = xstrdup (url_parsed->url);
+            }
+          url_free(url_parsed);
         }
 
       if (opt.spider)
@@ -369,7 +378,7 @@ retrieve_tree (const char *start_url)
       if (children)
         {
           struct urlpos *child = children;
-          struct url *url_parsed = url_parse (url, NULL, i);
+          struct url *url_parsed = url_parse (url, NULL, i, true);
           struct iri *ci;
           char *referer_url = url;
           bool strip_auth = (url_parsed != NULL
@@ -456,8 +465,6 @@ retrieve_tree (const char *start_url)
     }
 
   url_queue_delete (queue);
-  if (start_url_parsed)
-    url_free (start_url_parsed);
   string_set_free (blacklist);
 
   if (opt.quota && total_downloaded_bytes > opt.quota)
@@ -674,18 +681,17 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
    it is merely a simple-minded wrapper around download_child_p. */
 
 static bool
-descend_redirect_p (const char *redirected, const char *original, int depth,
+descend_redirect_p (const char *redirected, struct url *orig_parsed, int depth,
                     struct url *start_url_parsed, struct hash_table *blacklist,
                     struct iri *iri)
 {
-  struct url *orig_parsed, *new_parsed;
+  struct url *new_parsed;
   struct urlpos *upos;
   bool success;
 
-  orig_parsed = url_parse (original, NULL, NULL);
   assert (orig_parsed != NULL);
 
-  new_parsed = url_parse (redirected, NULL, NULL);
+  new_parsed = url_parse (redirected, NULL, NULL, false);
   assert (new_parsed != NULL);
 
   upos = xnew0 (struct urlpos);
@@ -694,7 +700,6 @@ descend_redirect_p (const char *redirected, const char *original, int depth,
   success = download_child_p (upos, orig_parsed, depth,
                               start_url_parsed, blacklist, iri);
 
-  url_free (orig_parsed);
   url_free (new_parsed);
   xfree (upos);
 
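
Commentary (not part of the patch): the heart of this change is that retrieve_tree () no longer parses its start URL itself. The caller now hands in a ready-made struct url plus an optional struct iri, and the function deep-copies the iri's string members so the caller's struct and wget's working copy can be freed independently. The following minimal sketch illustrates that duplication idiom in isolation, under stated assumptions: struct iri is cut down to the three members the patch copies, plain strdup () stands in for wget's xstrdup (), and iri_dup () is a hypothetical helper invented for the example, not a function from the wget tree.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>

/* Cut-down stand-in for wget's struct iri: only the members that
   retrieve_tree () copies in the patch above.  */
struct iri {
  char *uri_encoding;      /* encoding of the URL bytes */
  char *content_encoding;  /* encoding reported by the server */
  bool utf8_encode;        /* whether to re-encode to UTF-8 */
};

/* Same idea as the patch's local COPYSTR macro: duplicate a string
   member when present, propagate NULL otherwise.  Unlike the patch's
   version, the expansion is parenthesised and has no trailing
   semicolon, so it behaves like an ordinary expression.  */
#define COPYSTR(x) ((x) ? strdup (x) : NULL)

/* Hypothetical helper (not in wget): build an independent copy of PI,
   or a default-initialised iri when PI is NULL, mirroring the
   if (pi) / else set_uri_encoding () branch in retrieve_tree ().  */
static struct iri *
iri_dup (const struct iri *pi)
{
  struct iri *i = calloc (1, sizeof *i);
  if (!i)
    return NULL;

  if (pi)
    {
      i->uri_encoding = COPYSTR (pi->uri_encoding);
      i->content_encoding = COPYSTR (pi->content_encoding);
      i->utf8_encode = pi->utf8_encode;
    }
  else
    /* Stands in for set_uri_encoding (i, opt.locale, true).  */
    i->uri_encoding = strdup ("UTF-8");

  return i;
}

int
main (void)
{
  struct iri src = { (char *) "ISO-8859-1", NULL, true };
  struct iri *copy = iri_dup (&src);

  /* The copy owns its own strings; freeing it leaves SRC untouched.  */
  printf ("uri_encoding=%s utf8_encode=%d\n",
          copy->uri_encoding, copy->utf8_encode);
  free (copy->uri_encoding);
  free (copy->content_encoding);
  free (copy);
  return 0;
}

One wrinkle worth noting in the original: the patch defines COPYSTR with an unparenthesised conditional and a trailing semicolon, which is only safe in the plain assignments where it is used, and is presumably why it is #undef'd immediately afterwards.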