X-Git-Url: http://sjero.net/git/?a=blobdiff_plain;f=src%2Frecur.c;h=72274fb52673c36f0924ccbfb1dbc306b5f12bfa;hb=b718128b4f3eb8473fb3b31c8397b49854e74ab7;hp=4e95e86915104d80fb018e2b636a5faab8dfd36e;hpb=18bca2706b4c5c1f3bdad7349e092d0675608a61;p=wget

diff --git a/src/recur.c b/src/recur.c
index 4e95e869..72274fb5 100644
--- a/src/recur.c
+++ b/src/recur.c
@@ -1,6 +1,7 @@
 /* Handling of recursive HTTP retrieving.
-   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
-   2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+   Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004,
+   2005, 2006, 2007, 2008, 2009, 2010, 2011 Free Software Foundation,
+   Inc.
 
 This file is part of GNU Wget.
 
@@ -33,9 +34,7 @@ as that of the covered work.  */
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#ifdef HAVE_UNISTD_H
-# include <unistd.h>
-#endif /* HAVE_UNISTD_H */
+#include <unistd.h>
 #include <errno.h>
 #include <assert.h>
 
@@ -200,7 +199,6 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
      the queue, but haven't been downloaded yet.  */
   struct hash_table *blacklist;
 
-  int up_error_code;
   struct iri *i = iri_new ();
 
 #define COPYSTR(x)  (x) ? xstrdup(x) : NULL;
@@ -283,7 +281,7 @@ retrieve_tree (struct url *start_url_parsed, struct iri *pi)
           struct url *url_parsed = url_parse (url, &url_err, i, true);
 
           status = retrieve_url (url_parsed, url, &file, &redirected, referer,
-                                 &dt, false, i);
+                                 &dt, false, i, true);
 
           if (html_allowed && file && status == RETROK
               && (dt & RETROKF) && (dt & TEXTHTML))
@@ -565,7 +563,8 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
   if (opt.no_parent
       && schemes_are_similar_p (u->scheme, start_url_parsed->scheme)
       && 0 == strcasecmp (u->host, start_url_parsed->host)
-      && u->port == start_url_parsed->port
+      && (u->scheme != start_url_parsed->scheme
+          || u->port == start_url_parsed->port)
       && !(opt.page_requisites && upos->link_inline_p))
     {
       if (!subdir_p (start_url_parsed->dir, u->dir))
@@ -587,6 +586,11 @@ download_child_p (const struct urlpos *upos, struct url *parent, int depth,
           goto out;
         }
     }
+  if (!accept_url (url))
+    {
+      DEBUGP (("%s is excluded/not-included through regex.\n", url));
+      goto out;
+    }
 
   /* 6. Check for acceptance/rejection rules.  We ignore these rules
      for directories (no file name to match) and for non-leaf HTMLs,
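
A note on the --no-parent hunk (old line 565), offered only as an
illustration and not as part of the patch: the old condition required
u->port == start_url_parsed->port even when schemes_are_similar_p() had
already matched two similar but different schemes (http vs. https), whose
default ports (80 and 443) never agree.  The patched condition compares
ports only when the two schemes are literally identical.  The standalone
sketch below shows that predicate in isolation; url_stub and
port_ok_for_no_parent are hypothetical stand-ins for wget's struct url and
the inline test in download_child_p(), which live in url.h and recur.c.

    /* sketch.c -- illustrative stand-ins only, not wget code. */
    #include <stdbool.h>
    #include <stdio.h>

    enum url_scheme { SCHEME_HTTP, SCHEME_HTTPS };  /* stand-in enum */

    struct url_stub             /* hypothetical subset of struct url */
    {
      enum url_scheme scheme;
      int port;
    };

    /* Port test as rewritten by the patch: insist on equal ports only when
       the schemes are literally the same, so http://host/ (port 80) can
       still be treated as the parent of an https link on port 443. */
    static bool
    port_ok_for_no_parent (const struct url_stub *u,
                           const struct url_stub *start)
    {
      return u->scheme != start->scheme || u->port == start->port;
    }

    int
    main (void)
    {
      struct url_stub start = { SCHEME_HTTP, 80 };
      struct url_stub https_link = { SCHEME_HTTPS, 443 };
      struct url_stub odd_port = { SCHEME_HTTP, 8080 };

      printf ("https link accepted: %d\n",
              port_ok_for_no_parent (&https_link, &start));   /* prints 1 */
      printf ("odd http port accepted: %d\n",
              port_ok_for_no_parent (&odd_port, &start));     /* prints 0 */
      return 0;
    }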