X-Git-Url: http://sjero.net/git/?p=wget;a=blobdiff_plain;f=src%2Fres.c;h=69abd12dd5b60dc8d03193b3ddcd55f1003a7c63;hp=8c35f0e1cfae726a877e23e12b4ac44256d940ff;hb=d82f80ecab9bfef857d780f894cca7e890780ce0;hpb=c31e00b52d49632dd0f005269ab2b820c7fd2c34

diff --git a/src/res.c b/src/res.c
index 8c35f0e1..69abd12d 100644
--- a/src/res.c
+++ b/src/res.c
@@ -532,21 +532,28 @@ res_get_specs (const char *host, int port)
    Return true if robots were retrieved OK, false otherwise.  */
 
 bool
-res_retrieve_file (const char *url, char **file)
+res_retrieve_file (const char *url, char **file, struct iri *iri)
 {
+  struct iri *i = iri_new ();
   uerr_t err;
   char *robots_url = uri_merge (url, RES_SPECS_LOCATION);
   int saved_ts_val = opt.timestamping;
   int saved_sp_val = opt.spider;
 
+  /* Copy server URI encoding for a possible IDNA transformation, no need to
+     encode the full URI in UTF-8 because "robots.txt" is plain ASCII */
+  set_uri_encoding (i, iri->uri_encoding);
+  i->utf8_encode = false;
+
   logputs (LOG_VERBOSE, _("Loading robots.txt; please ignore errors.\n"));
   *file = NULL;
   opt.timestamping = false;
   opt.spider = false;
-  err = retrieve_url (robots_url, file, NULL, NULL, NULL, false);
+  err = retrieve_url (robots_url, file, NULL, NULL, NULL, false, i);
   opt.timestamping = saved_ts_val;
-  opt.spider = saved_sp_val;
+  opt.spider = saved_sp_val;
   xfree (robots_url);
+  iri_free (i);
 
   if (err != RETROK && *file != NULL)
     {
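
Caller-side note (not part of the patch above): with the extra `struct iri *'
parameter, every call site of res_retrieve_file now has to pass along the IRI
context it already tracks for the document being crawled, so that the
robots.txt request can reuse the server's URI encoding for the IDNA hostname
conversion while the path itself stays plain ASCII.  The sketch below
illustrates one such adaptation under that assumption; the function name
fetch_robots_sketch and the variables start_url, doc_iri and rfile are
illustrative only and do not appear in the wget tree.  The includes mirror
those used by src/res.c itself.

/* Hypothetical caller sketch: retrieve and parse robots.txt for the site
   containing START_URL, reusing the caller's IRI context DOC_IRI.  */

#include "wget.h"
#include "utils.h"
#include "res.h"
#include "iri.h"

static struct robot_specs *
fetch_robots_sketch (const char *start_url, struct iri *doc_iri)
{
  char *rfile = NULL;
  struct robot_specs *specs = NULL;

  /* res_retrieve_file downloads the site's /robots.txt; on success it
     stores the local file name in RFILE.  The new third argument is the
     IRI context of the document that triggered the robots lookup.  */
  if (res_retrieve_file (start_url, &rfile, doc_iri) && rfile != NULL)
    {
      specs = res_parse_from_file (rfile);
      xfree (rfile);
    }

  return specs;
}

The returned specs could then be registered with res_register_specs and
consulted through res_match_path, exactly as callers of the old two-argument
res_retrieve_file already do.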