#else
# include <strings.h>
#endif
-#include <ctype.h>
#include <sys/types.h>
#ifdef HAVE_UNISTD_H
# include <unistd.h>
extern int errno;
#endif
-/* Default port definitions */
-#define DEFAULT_HTTP_PORT 80
-#define DEFAULT_FTP_PORT 21
-
/* Table of Unsafe chars. This is intialized in
init_unsafe_char_table. */
if (contains_unsafe (s)) \
{ \
char *uc_tmp = encode_string (s); \
- free (s); \
+ xfree (s); \
(s) = uc_tmp; \
} \
} while (0)
static int urlpath_length PARAMS ((const char *));
/* NULL-terminated list of strings to be recognized as prototypes (URL
- schemes). Note that recognized doesn't mean supported -- only HTTP
- and FTP are currently supported.
+ schemes). Note that recognized doesn't mean supported -- only HTTP,
+ HTTPS and FTP are currently supported.
However, a string that does not match anything in the list will be
considered a relative URL. Thus it's important that this list has
static struct proto sup_protos[] =
{
{ "http://", URLHTTP, DEFAULT_HTTP_PORT },
+#ifdef HAVE_SSL
+ { "https://",URLHTTPS, DEFAULT_HTTPS_PORT},
+#endif
{ "ftp://", URLFTP, DEFAULT_FTP_PORT },
/*{ "file://", URLFILE, DEFAULT_FTP_PORT },*/
};
int i;
for (i = 0; i < 256; i++)
if (i < 32 || i >= 127
+ || i == ' '
|| i == '<'
|| i == '>'
|| i == '\"'
skip_uname (const char *url)
{
const char *p;
- for (p = url; *p && *p != '/'; p++)
- if (*p == '@')
- break;
+ const char *q = NULL;
+ for (p = url ; *p && *p != '/'; p++)
+ if (*p == '@') q = p;
/* If a `@' was found before the first occurrence of `/', skip
it. */
- if (*p == '@')
- return p - url + 1;
+ if (q != NULL)
+ return q - url + 1;
else
return 0;
}
if (u->proxy)
freeurl (u->proxy, 1);
if (complete)
- free (u);
+ xfree (u);
return;
}
\f
u->proto = type = URLHTTP;
if (!u->port)
{
- int i;
- for (i = 0; i < ARRAY_SIZE (sup_protos); i++)
- if (sup_protos[i].ind == type)
+ int ind;
+ for (ind = 0; ind < ARRAY_SIZE (sup_protos); ind++)
+ if (sup_protos[ind].ind == type)
break;
- if (i == ARRAY_SIZE (sup_protos))
+ if (ind == ARRAY_SIZE (sup_protos))
return URLUNKNOWN;
- u->port = sup_protos[i].port;
+ u->port = sup_protos[ind].port;
}
/* Some delimiter troubles... */
if (url[i] == '/' && url[i - 1] != ':')
/* #### We don't handle type `d' correctly yet. */
if (!u->ftp_type || TOUPPER (u->ftp_type) == 'D')
u->ftp_type = 'I';
+ DEBUGP (("ftp_type %c -> ", u->ftp_type));
}
DEBUGP (("opath %s -> ", u->path));
/* Parse the username and password (if existing). */
parse_uname (const char *url, char **user, char **passwd)
{
int l;
- const char *p, *col;
+ const char *p, *q, *col;
char **where;
*user = NULL;
if (*p != '@')
return URLOK;
/* Else find the username and password. */
- for (p = col = url; *p != '@'; p++)
+ for (p = q = col = url; *p != '/'; p++)
{
if (*p == ':' && !*user)
{
(*user)[p - url] = '\0';
col = p + 1;
}
+ if (*p == '@') q = p;
}
/* Decide whether you have only the username or both. */
where = *user ? passwd : user;
- *where = (char *)xmalloc (p - col + 1);
- memcpy (*where, col, p - col);
- (*where)[p - col] = '\0';
+ *where = (char *)xmalloc (q - col + 1);
+ memcpy (*where, col, q - col);
+ (*where)[q - col] = '\0';
return URLOK;
}
return '\0';
}
\f
-/* Return the URL as fine-formed string, with a proper protocol,
- optional port number, directory and optional user/password. If
- HIDE is non-zero, password will be hidden. The forbidden
- characters in the URL will be cleansed. */
+/* Return the URL as fine-formed string, with a proper protocol, optional port
+ number, directory and optional user/password. If `hide' is non-zero (as it
+ is when we're calling this on a URL we plan to print, but not when calling it
+ to canonicalize a URL for use within the program), password will be hidden.
+ The forbidden characters in the URL will be cleansed. */
char *
str_url (const struct urlinfo *u, int hide)
{
user = CLEANDUP (u->user);
if (u->passwd)
{
- int i;
- passwd = CLEANDUP (u->passwd);
if (hide)
- for (i = 0; passwd[i]; i++)
- passwd[i] = 'x';
+ /* Don't output the password, or someone might see it over the user's
+ shoulder (or in saved wget output). Don't give away the number of
+ characters in the password, either, as we did in past versions of
+ this code, when we replaced the password characters with 'x's. */
+ passwd = xstrdup("<password>");
+ else
+ passwd = CLEANDUP (u->passwd);
}
if (u->proto == URLFTP && *dir == '/')
{
tmp[1] = '2';
tmp[2] = 'F';
strcpy (tmp + 3, dir + 1);
- free (dir);
+ xfree (dir);
dir = tmp;
}
if (*dir)
res[l++] = '/';
strcpy (res + l, file);
- free (host);
- free (dir);
- free (file);
+ xfree (host);
+ xfree (dir);
+ xfree (file);
FREE_MAYBE (user);
FREE_MAYBE (passwd);
return res;
while (l)
{
urlpos *next = l->next;
- free (l->url);
+ xfree (l->url);
FREE_MAYBE (l->local_name);
- free (l);
+ xfree (l);
l = next;
}
}
{
if (S_ISDIR (st.st_mode))
{
- free (t);
+ xfree (t);
return 0;
}
else
res = make_directory (t);
if (res != 0)
logprintf (LOG_NOTQUIET, "%s: %s", t, strerror (errno));
- free (t);
+ xfree (t);
return res;
}
if (opt.add_hostdir && !opt.simple_check)
{
char *nhost = realhost (host);
- free (host);
+ xfree (host);
host = nhost;
}
/* Add dir_prefix and hostname (if required) to the beginning of
else
dirpref = "";
}
- free (host);
+ xfree (host);
/* If there is a prefix, prepend it. */
if (*dirpref)
/* Finally, construct the full name. */
res = (char *)xmalloc (strlen (dir) + 1 + strlen (file) + 1);
sprintf (res, "%s%s%s", dir, *dir ? "/" : "", file);
- free (dir);
+ xfree (dir);
return res;
}
char *nfile = (char *)xmalloc (strlen (opt.dir_prefix)
+ 1 + strlen (file) + 1);
sprintf (nfile, "%s/%s", opt.dir_prefix, file);
- free (file);
+ xfree (file);
file = nfile;
}
}
/* Find a unique name. */
name = unique_name (file);
- free (file);
+ xfree (file);
return name;
}
{
/* Find the "true" host. */
char *host = realhost (u->host);
- free (u->host);
+ xfree (u->host);
u->host = host;
assert (u->dir != NULL); /* the URL must have been parsed */
/* Refresh the printed representation. */
- free (u->url);
+ xfree (u->url);
u->url = str_url (u, 0);
}
return opt.http_proxy ? opt.http_proxy : getenv ("http_proxy");
else if (proto == URLFTP)
return opt.ftp_proxy ? opt.ftp_proxy : getenv ("ftp_proxy");
+#ifdef HAVE_SSL
+ else if (proto == URLHTTPS)
+ return opt.https_proxy ? opt.https_proxy : getenv ("https_proxy");
+#endif /* HAVE_SSL */
else
return NULL;
}
}
\f
static void write_backup_file PARAMS ((const char *, downloaded_file_t));
+static void replace_attr PARAMS ((const char **, int, FILE *, const char *));
/* Change the links in an HTML document. Accepts a structure that
defines the positions of all the links. */
{
struct file_memory *fm;
FILE *fp;
- char *p;
+ const char *p;
downloaded_file_t downloaded_file_return;
+ logprintf (LOG_VERBOSE, _("Converting %s... "), file);
+
{
/* First we do a "dry run": go through the list L and see whether
any URL needs to be converted in the first place. If not, just
int count = 0;
urlpos *dry = l;
for (dry = l; dry; dry = dry->next)
- if (dry->flags & (UABS2REL | UREL2ABS))
+ if (dry->convert != CO_NOCONVERT)
++count;
if (!count)
{
- logprintf (LOG_VERBOSE, _("Nothing to do while converting %s.\n"),
- file);
+ logputs (LOG_VERBOSE, _("nothing to do.\n"));
return;
}
}
- logprintf (LOG_VERBOSE, _("Converting %s... "), file);
-
fm = read_file (file);
if (!fm)
{
for (; l; l = l->next)
{
char *url_start = fm->content + l->pos;
+
if (l->pos >= fm->length)
{
DEBUGP (("Something strange is going on. Please investigate."));
break;
}
/* If the URL is not to be converted, skip it. */
- if (!(l->flags & (UABS2REL | UREL2ABS)))
+ if (l->convert == CO_NOCONVERT)
{
- DEBUGP (("Skipping %s at position %d (flags %d).\n", l->url,
- l->pos, l->flags));
+ DEBUGP (("Skipping %s at position %d.\n", l->url, l->pos));
continue;
}
quote, to the outfile. */
fwrite (p, 1, url_start - p, fp);
p = url_start;
- if (l->flags & UABS2REL)
+ if (l->convert == CO_CONVERT_TO_RELATIVE)
{
/* Convert absolute URL to relative. */
char *newname = construct_relative (file, l->local_name);
- putc (*p, fp); /* quoting char */
- fputs (newname, fp);
- p += l->size - 1;
- putc (*p, fp); /* close quote */
- ++p;
- DEBUGP (("ABS2REL: %s to %s at position %d in %s.\n",
+ char *quoted_newname = html_quote_string (newname);
+ replace_attr (&p, l->size, fp, quoted_newname);
+ DEBUGP (("TO_RELATIVE: %s to %s at position %d in %s.\n",
l->url, newname, l->pos, file));
- free (newname);
+ xfree (newname);
+ xfree (quoted_newname);
}
- else if (l->flags & UREL2ABS)
+ else if (l->convert == CO_CONVERT_TO_COMPLETE)
{
/* Convert the link to absolute URL. */
char *newlink = l->url;
- putc (*p, fp); /* quoting char */
- fputs (newlink, fp);
- p += l->size - 1;
- putc (*p, fp); /* close quote */
- ++p;
- DEBUGP (("REL2ABS: <something> to %s at position %d in %s.\n",
+ char *quoted_newlink = html_quote_string (newlink);
+ replace_attr (&p, l->size, fp, quoted_newlink);
+ DEBUGP (("TO_COMPLETE: <something> to %s at position %d in %s.\n",
newlink, l->pos, file));
+ xfree (quoted_newlink);
}
}
/* Output the rest of the file. */
rather than making a copy of the string... Another note is that I
thought I could just add a field to the urlpos structure saying
that we'd written a .orig file for this URL, but that didn't work,
- so I had to make this separate list. */
+ so I had to make this separate list.
+ -- Dan Harkless <wget@harkless.org>
+
+ This [adding a field to the urlpos structure] didn't work
+ because convert_file() is called twice: once after all its
+ sublinks have been retrieved in recursive_retrieve(), and
+ once at the end of the day in convert_all_links(). The
+ original linked list collected in recursive_retrieve() is
+ lost after the first invocation of convert_links(), and
+ convert_all_links() makes a new one (it calls get_urls_html()
+ for each file it covers.) That's why your first approach didn't
+ work. The way to make it work is perhaps to make this flag a
+ field in the `urls_html' list.
+ -- Hrvoje Niksic <hniksic@arsdigita.com>
+ */
converted_file_ptr = xmalloc(sizeof(*converted_file_ptr));
converted_file_ptr->string = xstrdup(file); /* die on out-of-mem. */
converted_file_ptr->next = converted_files;
}
}
+static int find_fragment PARAMS ((const char *, int, const char **,
+ const char **));
+
+static void
+replace_attr (const char **pp, int raw_size, FILE *fp, const char *new_str)
+{
+ const char *p = *pp;
+ int quote_flag = 0;
+ int size = raw_size;
+ char quote_char = '\"';
+ const char *frag_beg, *frag_end;
+
+ /* Structure of our string is:
+ "...old-contents..."
+ <--- l->size ---> (with quotes)
+ OR:
+ ...old-contents...
+ <--- l->size --> (no quotes) */
+
+ if (*p == '\"' || *p == '\'')
+ {
+ quote_char = *p;
+ quote_flag = 1;
+ ++p;
+ size -= 2; /* disregard opening and closing quote */
+ }
+ putc (quote_char, fp);
+ fputs (new_str, fp);
+
+ /* Look for fragment identifier, if any. */
+ if (find_fragment (p, size, &frag_beg, &frag_end))
+ fwrite (frag_beg, 1, frag_end - frag_beg, fp);
+ p += size;
+ if (quote_flag)
+ ++p;
+ putc (quote_char, fp);
+ *pp = p;
+}
+
+/* Find the first occurrence of '#' in [BEG, BEG+SIZE) that is not
+ preceded by '&'. If the character is not found, return zero. If
+ the character is found, return 1 and set BP and EP to point to the
+ beginning and end of the region.
+
+ This is used for finding the fragment identifiers in URLs.
+
+static int
+find_fragment (const char *beg, int size, const char **bp, const char **ep)
+{
+ const char *end = beg + size;
+ int saw_amp = 0;
+ for (; beg < end; beg++)
+ {
+ switch (*beg)
+ {
+ case '&':
+ saw_amp = 1;
+ break;
+ case '#':
+ if (!saw_amp)
+ {
+ *bp = beg;
+ *ep = end;
+ return 1;
+ }
+ /* fallthrough */
+ default:
+ saw_amp = 0;
+ }
+ }
+ return 0;
+}
+
+typedef struct _downloaded_file_list {
+ char* file;
+ downloaded_file_t download_type;
+ struct _downloaded_file_list* next;
+} downloaded_file_list;
+
+static downloaded_file_list *downloaded_files;
+
/* Remembers which files have been downloaded. In the standard case, should be
called with mode == FILE_DOWNLOADED_NORMALLY for each file we actually
download successfully (i.e. not for ones we have failures on or that we skip
downloaded_file_t
downloaded_file (downloaded_file_t mode, const char* file)
{
- typedef struct _downloaded_file_list
- {
- char* file;
- downloaded_file_t download_type;
- struct _downloaded_file_list* next;
- } downloaded_file_list;
-
boolean found_file = FALSE;
- static downloaded_file_list* downloaded_files = NULL;
downloaded_file_list* rover = downloaded_files;
while (rover != NULL)
return FILE_NOT_ALREADY_DOWNLOADED;
}
}
+
+void
+downloaded_files_free (void)
+{
+ downloaded_file_list* rover = downloaded_files;
+ while (rover)
+ {
+ downloaded_file_list *next = rover->next;
+ xfree (rover->file);
+ xfree (rover);
+ rover = next;
+ }
+}
\f
/* Initialization of static stuff. */
void