if (refurl)
u->referer = xstrdup (refurl);
else
- u->referer = NULL;
+ {
+ if (opt.referer)
+ u->referer = xstrdup (opt.referer);
+ else
+ u->referer = NULL;
+ }
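The new else-branch establishes a clear precedence for the Referer header: a referring page discovered during retrieval wins, the value given with --referer is the fallback, and otherwise no Referer is sent. A minimal sketch of that precedence as a helper (choose_referer is a hypothetical name; xstrdup and opt.referer are taken from the hunk above):

/* Hypothetical helper; mirrors the precedence set up above.  */
static char *
choose_referer (const char *refurl)
{
  if (refurl)                   /* the page that linked here */
    return xstrdup (refurl);
  if (opt.referer)              /* --referer from the command line */
    return xstrdup (opt.referer);
  return NULL;                  /* send no Referer header */
}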
local_use_proxy = USE_PROXY_P (u);
if (local_use_proxy)
if (result != URLOK || u->proto != URLHTTP)
{
if (u->proto == URLHTTP)
- logprintf (LOG_NOTQUIET, "Proxy %s: %s.\n", proxy, uerrmsg (result));
+ logprintf (LOG_NOTQUIET, "Proxy %s: %s.\n", proxy, uerrmsg(result));
else
logprintf (LOG_NOTQUIET, _("Proxy %s: Must be HTTP.\n"), proxy);
freeurl (u, 1);
return result;
}
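The nested test above is compact but easy to misread: the outer condition fires either when the proxy URL fails to parse or when it parses to a non-HTTP scheme, and the inner test only chooses which message to print. Spelled out, the same dispatch looks like this (a sketch for clarity, not a proposed change; freeurl and the early return stay as shown above):

/* Equivalent message selection, written out for clarity (sketch).  */
if (u->proto != URLHTTP)
  /* Whatever the URL parser said, the scheme is not HTTP.  */
  logprintf (LOG_NOTQUIET, _("Proxy %s: Must be HTTP.\n"), proxy);
else if (result != URLOK)
  /* HTTP scheme, but the proxy URL itself is malformed.  */
  logprintf (LOG_NOTQUIET, "Proxy %s: %s.\n", proxy, uerrmsg (result));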
-/* Find the URL-s in the file and call retrieve_url() for each of
+/* Find the URLs in the file and call retrieve_url() for each of
them. If HTML is non-zero, treat the file as HTML, and construct
- the URL-s accordingly.
+ the URLs accordingly.
If opt.recursive is set, call recursive_retrieve() for each file. */
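Because the excerpt only shows fragments of the body, here is a condensed sketch of the loop this comment describes, with the pieces from the hunks below stitched together. The function name and signature, the urlpos `next' member, and the omission of quota and cleanup handling are assumptions made for the sketch, not part of the patch:

/* Sketch only: condensed retrieve_from_file() loop.  */
uerr_t
retrieve_from_file (const char *file, int html, int *count)
{
  uerr_t status = RETROK;       /* Suppose everything is OK.  */
  urlpos *url_list, *cur_url;

  url_list = (html ? get_urls_html (file, NULL, opt.spider, FALSE)
              : get_urls_file (file));

  for (cur_url = url_list, *count = 0; cur_url;
       cur_url = cur_url->next, ++*count)
    {
      char *filename = NULL, *new_file = NULL;
      int dt;

      status = retrieve_url (cur_url->url, &filename, &new_file, NULL, &dt);
      if (opt.recursive && status == RETROK && (dt & TEXTHTML))
        status = recursive_retrieve (filename,
                                     new_file ? new_file : cur_url->url);
      /* --delete-after handling and freeing of filename/new_file
         omitted here; see the hunks below.  */
    }
  /* Freeing of url_list omitted.  */
  return status;
}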
uerr_t
/* If spider-mode is on, we do not want get_urls_html barfing
errors on baseless links. */
- url_list = (html ? get_urls_html (file, NULL, opt.spider)
+ url_list = (html ? get_urls_html (file, NULL, opt.spider, FALSE)
: get_urls_file (file));
status = RETROK; /* Suppose everything is OK. */
*count = 0; /* Reset the URL count. */
}
status = retrieve_url (cur_url->url, &filename, &new_file, NULL, &dt);
if (opt.recursive && status == RETROK && (dt & TEXTHTML))
- status = recursive_retrieve (filename, new_file ? new_file : cur_url->url);
+ status = recursive_retrieve (filename, new_file ? new_file
+ : cur_url->url);
if (filename && opt.delete_after && file_exists_p (filename))
{