X-Git-Url: http://sjero.net/git/?p=wget;a=blobdiff_plain;f=src%2Fres.c;h=0996c90370104013c0fb1b93b61466bc45988c79;hp=0320d034246cfce5639397522a1a41f9789fdbb8;hb=2f6aa1d7417df1dfc58597777686fbd77179b9fd;hpb=b967d49f79b6e0ce73559bd30d231bddc2e4b232

diff --git a/src/res.c b/src/res.c
index 0320d034..0996c903 100644
--- a/src/res.c
+++ b/src/res.c
@@ -1,5 +1,6 @@
 /* Support for Robot Exclusion Standard (RES).
-   Copyright (C) 2001, 2006, 2007, 2008 Free Software Foundation, Inc.
+   Copyright (C) 2001, 2006, 2007, 2008, 2009, 2010, 2011 Free Software
+   Foundation, Inc.
 
 This file is part of Wget.
 
@@ -384,7 +385,7 @@ struct robot_specs *
 res_parse_from_file (const char *filename)
 {
   struct robot_specs *specs;
-  struct file_memory *fm = read_file (filename);
+  struct file_memory *fm = wget_read_file (filename);
   if (!fm)
     {
       logprintf (LOG_NOTQUIET, _("Cannot open %s: %s"),
@@ -392,7 +393,7 @@ res_parse_from_file (const char *filename)
       return NULL;
     }
   specs = res_parse (fm->content, fm->length);
-  read_file_free (fm);
+  wget_read_file_free (fm);
   return specs;
 }
 
@@ -538,7 +539,8 @@ res_retrieve_file (const char *url, char **file, struct iri *iri)
   uerr_t err;
   char *robots_url = uri_merge (url, RES_SPECS_LOCATION);
   int saved_ts_val = opt.timestamping;
-  int saved_sp_val = opt.spider;
+  int saved_sp_val = opt.spider, url_err;
+  struct url * url_parsed;
 
   /* Copy server URI encoding for a possible IDNA transformation, no need to
      encode the full URI in UTF-8 because "robots.txt" is plain ASCII */
@@ -549,7 +551,22 @@ res_retrieve_file (const char *url, char **file, struct iri *iri)
   *file = NULL;
   opt.timestamping = false;
   opt.spider = false;
-  err = retrieve_url (robots_url, file, NULL, NULL, NULL, false, i);
+
+  url_parsed = url_parse (robots_url, &url_err, iri, true);
+  if (!url_parsed)
+    {
+      char *error = url_error (robots_url, url_err);
+      logprintf (LOG_NOTQUIET, "%s: %s.\n", robots_url, error);
+      xfree (error);
+      err = URLERROR;
+    }
+  else
+    {
+      err = retrieve_url (url_parsed, robots_url, file, NULL, NULL, NULL,
+                          false, i, false);
+      url_free(url_parsed);
+    }
+
   opt.timestamping = saved_ts_val;
   opt.spider = saved_sp_val;
   xfree (robots_url);
@@ -573,7 +590,7 @@ is_robots_txt_url (const char *url)
   bool ret = are_urls_equal (url, robots_url);
 
   xfree (robots_url);
-  
+
   return ret;
 }
 
@@ -609,10 +626,10 @@ test_is_robots_txt_url()
     { "http://www.yoyodyne.com/somepath/", false },
     { "http://www.yoyodyne.com/somepath/robots.txt", false },
   };
-  
+
-  for (i = 0; i < sizeof(test_array)/sizeof(test_array[0]); ++i) 
+  for (i = 0; i < sizeof(test_array)/sizeof(test_array[0]); ++i)
     {
-      mu_assert ("test_is_robots_txt_url: wrong result", 
+      mu_assert ("test_is_robots_txt_url: wrong result",
                  is_robots_txt_url (test_array[i].url) == test_array[i].expected_result);
     }