#include "convert.h"
#include "ptimer.h"
#include "iri.h"
+#include "html-url.h"
/* Total size of downloaded files. Used to enforce quota. */
SUM_SIZE_INT total_downloaded_bytes;
char *saved_post_data = NULL;
char *saved_post_file_name = NULL;
- bool utf8_encoded = opt.enable_iri;
-
/* If dt is NULL, use local storage. */
if (!dt)
{
if (file)
*file = NULL;
+ reset_utf8_encode ();
+
second_try:
- u = url_parse (url, &up_error_code, &utf8_encoded);
+ u = url_parse (url, &up_error_code);
if (!u)
{
logprintf (LOG_NOTQUIET, "%s: %s.\n", url, url_error (up_error_code));
if (proxy)
{
/* sXXXav : support IRI for proxy */
- bool proxy_utf8_encode = false;
/* Parse the proxy URL. */
- proxy_url = url_parse (proxy, &up_error_code, &proxy_utf8_encode);
+ set_ugly_no_encode (true);
+ proxy_url = url_parse (proxy, &up_error_code);
+ set_ugly_no_encode (false);
if (!proxy_url)
{
logprintf (LOG_NOTQUIET, _("Error parsing proxy URL %s: %s.\n"),
xfree (mynewloc);
mynewloc = construced_newloc;
- utf8_encoded = opt.enable_iri;
+ reset_utf8_encode ();
/* Now, see if this new location makes sense. */
- newloc_parsed = url_parse (mynewloc, &up_error_code, &utf8_encoded);
+ newloc_parsed = url_parse (mynewloc, &up_error_code);
if (!newloc_parsed)
{
logprintf (LOG_NOTQUIET, "%s: %s.\n", escnonprint_uri (mynewloc),
}
/* Try to not encode in UTF-8 if fetching failed */
- if (result != RETROK && utf8_encoded)
+ if (!(*dt & RETROKF) && get_utf8_encode ())
{
- utf8_encoded = false;
+ set_utf8_encode (false);
/*printf ("[Fallbacking to non-utf8 for `%s'\n", url);*/
goto second_try;
}
register_redirection (origurl, u->url);
if (*dt & TEXTHTML)
register_html (u->url, local_file);
+ if (*dt & RETROKF)
+ {
+ register_download (u->url, local_file);
+ if (redirection_count && 0 != strcmp (origurl, u->url))
+ register_redirection (origurl, u->url);
+ if (*dt & TEXTHTML)
+ register_html (u->url, local_file);
+ if (*dt & TEXTCSS)
+ register_css (u->url, local_file);
+ }
}
if (file)
uerr_t status;
struct urlpos *url_list, *cur_url;
- url_list = (html ? get_urls_html (file, NULL, NULL)
- : get_urls_file (file));
+ char *input_file = NULL;
+ const char *url = file;
+
status = RETROK; /* Suppose everything is OK. */
*count = 0; /* Reset the URL count. */
+
+ if (url_has_scheme (url))
+ {
+ status = retrieve_url (url, &input_file, NULL, NULL, NULL, false);
+ if (status != RETROK)
+ return status;
+ }
+ else
+ input_file = (char *) file;
+
+ url_list = (html ? get_urls_html (input_file, NULL, NULL)
+ : get_urls_file (input_file));
for (cur_url = url_list; cur_url; cur_url = cur_url->next, ++*count)
{
bool
url_uses_proxy (const char *url)
{
- bool ret, utf8_encode = false;
- struct url *u = url_parse (url, NULL, &utf8_encode);
+ bool ret;
+ struct url *u;
+ set_ugly_no_encode (true);
+ u = url_parse (url, NULL);
+ set_ugly_no_encode (false);
if (!u)
return false;
ret = getproxy (u) != NULL;