/* Handling of recursive HTTP retrieving.
- Copyright (C) 1996-2006 Free Software Foundation, Inc.
+ Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
+ 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
This file is part of GNU Wget.
struct urlpos *child = children;
/* NOTE(review): the double assignment "url_parsed = url_parsed = ..."
   is redundant (self-assignment before the real one); a plain
   "struct url *url_parsed = url_parse (url, NULL);" is equivalent.  */
struct url *url_parsed = url_parsed = url_parse (url, NULL);
char *referer_url = url;
/* Patched below: guard against url_parse() returning NULL before
   dereferencing ->user.  The old form crashed on unparsable URLs.  */
- bool strip_auth = url_parsed->user;
+ bool strip_auth = (url_parsed != NULL
+ && url_parsed->user != NULL);
/* NOTE(review): assert() is compiled out in NDEBUG builds, so the
   NULL check above — not this assert — is what protects release
   builds; the assert documents the expected invariant in debug runs.  */
assert (url_parsed != NULL);
/* Strip auth info if present */
/* NOTE(review): the comment above looks misplaced — the code below
   retrieves and parses robots.txt, it does not strip auth info.
   Presumably this line belongs to an adjacent hunk; verify against
   the full source file.  */
if (res_retrieve_file (url, &rfile))
{
/* robots.txt was downloaded into the local file named by rfile;
   parse its rule set into specs (both declared outside this view).  */
specs = res_parse_from_file (rfile);
+
+ /* Delete the robots.txt file if we chose to either delete the
+ files after downloading or we're just running a spider. */
+ if (opt.delete_after || opt.spider)
+ {
+ logprintf (LOG_VERBOSE, "Removing %s.\n", rfile);
+ unlink (rfile);
+ }
+
/* rfile was allocated by res_retrieve_file; release it whether or
   not the file itself was unlinked above.  */
xfree (rfile);
}
else