+2009-06-12 Micah Cowan <micah@cowan.name>
+
+ * host.c: Include <sys/types.h> before <sys/socket.h>. Not
+ required by POSIX any more, but some older systems (such as
+ FreeBSD 4.1) still need it, and it doesn't seem like it could
+ hurt...
+
+ * build_info.c (library): Handle "https" as a feature in its own
+ right, apart from "gnutls" and "openssl".
+
+ * host.c: Declare h_errno if no declaration is provided. Idea
+ thanks to Maciej W. Rozycki.
+
+2009-06-11 Xin Zou <zouxin2008@gmail.com>
+
+ * http.c (gethttp): Fix some memory leaks.
+
+2009-06-11 Micah Cowan <micah@cowan.name>
+
+ * http.c (http_atotm): Handle potential for setlocale's return
+ value to be static storage. Thanks to Benjamin Wolsey
+ <bwy@benjaminwolsey.de>.
+
+ * sysdep.h: Need NAMESPACE_TWEAKS on non-Linux glibc-based
+ systems, too. Thanks to Robert Millan.
+
+2009-05-28 Steven Schubiger <stsc@member.fsf.org>
+
+ * ftp.c (ftp_get_listing): Update the "listing file"
+ string after calling ftp_loop_internal().
+
+2009-05-27 Steven Schubiger <stsc@member.fsf.org>
+
+ * ftp.c (ftp_get_listing): Duplicate the "listing file"
+ string to avoid memory corruption when FOPEN_EXCL_ERR is
+ encountered.
+
+2009-05-17 Steven Schubiger <stsc@member.fsf.org>
+
+ * progress.c (eta_to_human_short): Fix the remaining hours
+ to be displayed. Spotted by Tadeu Martins (#26411).
+
+2009-04-24 Micah Cowan <micah@cowan.name>
+
+ * hash.c: Change stdint.h inclusion to use HAVE_STDINT_H, not C99
+ check.
+
+ * connect.c: stdint.h inclusion added.
+
+ Thanks to Markus Duft <mduft@gentoo.org> for a similar patch.
+
+2009-04-20 Micah Cowan <micah@cowan.name>
+
+ * Makefile.am (version.c): Fix unportable use of "echo -n".
+
+2009-04-13 Steven Schubiger <stsc@member.fsf.org>
+
+ * ftp.c (ftp_retrieve_list): Move the duplicated code that
+ determines the local file to a function.
+
+ * http.c (http_loop): Likewise.
+
+ * retr.c (set_local_file): New function.
+
+2009-04-11 Steven Schubiger <stsc@member.fsf.org>
+
+ * init.c (initialize): Run a custom SYSTEM_WGETRC when
+ provided as an environment variable.
+
+2009-02-27 Gisle Vanem <gvanem@broadpark.no>
+
+ * main.c (main): "freopen (NULL,.." causes an assertion in MSVC
+ debug-mode. I.e. NULL isn't legal. But the "CONOUT$" device works
+ fine.
+
+2009-02-27 Steven Schubiger <stsc@member.fsf.org>
+
+ * ftp.c (ftp_loop_internal): Don't claim for FTP retrievals
+ when writing to standard output either that the document
+ has been saved. Addresses bug #20520 again.
+
+2009-02-21 Steven Schubiger <stsc@member.fsf.org>
+
+ * http.c (http_loop): When a document is written to
+ standard output, don't claim it has been saved to a file.
+ Addresses bug #20520.
+
+2009-02-18 Steven Schubiger <stsc@member.fsf.org>
+
+ * recur.h: Remove the dangling declaration for recursive_cleanup().
+
+2009-02-01 Gerardo E. Gidoni <gerel@gnu.org>
+
+ * main.c, recur.c, recur.h, res.c, retr.c, retr.h: restructured code to
+ avoid multiple 'url_parse' calls.
+
2008-11-13 Micah Cowan <micah@cowan.name>
* http.c (gethttp): Don't do anything when content-length >= our
/* Command line parsing.
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
- 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+ 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
This file is part of GNU Wget.
{
#ifdef WINDOWS
FILE *result;
- result = freopen (NULL, "wb", stdout);
+ result = freopen ("CONOUT$", "wb", stdout);
if (result == NULL)
{
logputs (LOG_NOTQUIET, _("\
for (t = url; *t; t++)
{
char *filename = NULL, *redirected_URL = NULL;
- int dt;
+ int dt, url_err;
+ struct url *url_parsed = url_parse (*t, &url_err);
- if ((opt.recursive || opt.page_requisites)
- && (url_scheme (*t) != SCHEME_FTP || url_uses_proxy (*t)))
+ if (!url_parsed)
{
- int old_follow_ftp = opt.follow_ftp;
+ char *error = url_error (*t, url_err);
+ logprintf (LOG_NOTQUIET, "%s: %s.\n",*t, error);
+ xfree (error);
+ status = URLERROR;
+ }
+ else
+ {
+ if ((opt.recursive || opt.page_requisites)
+ && (url_scheme (*t) != SCHEME_FTP || url_uses_proxy (url_parsed)))
+ {
+ int old_follow_ftp = opt.follow_ftp;
- /* Turn opt.follow_ftp on in case of recursive FTP retrieval */
- if (url_scheme (*t) == SCHEME_FTP)
- opt.follow_ftp = 1;
+ /* Turn opt.follow_ftp on in case of recursive FTP retrieval */
+ if (url_scheme (*t) == SCHEME_FTP)
+ opt.follow_ftp = 1;
- status = retrieve_tree (*t);
+ status = retrieve_tree (url_parsed);
- opt.follow_ftp = old_follow_ftp;
- }
- else
- status = retrieve_url (*t, &filename, &redirected_URL, NULL, &dt, opt.recursive);
+ opt.follow_ftp = old_follow_ftp;
+ }
+ else
+ status = retrieve_url (url_parsed, *t, &filename, &redirected_URL, NULL, &dt, opt.recursive);
- if (opt.delete_after && file_exists_p(filename))
- {
- DEBUGP (("Removing file due to --delete-after in main():\n"));
- logprintf (LOG_VERBOSE, _("Removing %s.\n"), filename);
- if (unlink (filename))
- logprintf (LOG_NOTQUIET, "unlink: %s\n", strerror (errno));
+ if (opt.delete_after && file_exists_p(filename))
+ {
+ DEBUGP (("Removing file due to --delete-after in main():\n"));
+ logprintf (LOG_VERBOSE, _("Removing %s.\n"), filename);
+ if (unlink (filename))
+ logprintf (LOG_NOTQUIET, "unlink: %s\n", strerror (errno));
+ }
+ xfree_null (redirected_URL);
+ xfree_null (filename);
+ url_free (url_parsed);
}
-
- xfree_null (redirected_URL);
- xfree_null (filename);
}
/* And then from the input file, if any. */
/* Declarations for recur.c.
Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003,
- 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.
+ 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.
This file is part of GNU Wget.
#ifndef RECUR_H
#define RECUR_H
+ #include "url.h"
+
/* For most options, 0 means no limits, but with -p in the picture,
that causes a problem on the maximum recursion depth variable. To
retain backwards compatibility we allow users to consider "0" to be
struct urlpos;
- uerr_t retrieve_tree (const char *);
+ void recursive_cleanup (void);
+ uerr_t retrieve_tree (struct url *);
#endif /* RECUR_H */
multiple points. */
uerr_t
- retrieve_url (const char *origurl, char **file, char **newloc,
- const char *refurl, int *dt, bool recursive)
+ retrieve_url (struct url * orig_parsed, const char *origurl, char **file,
+ char **newloc, const char *refurl, int *dt, bool recursive)
{
uerr_t result;
char *url;
bool location_changed;
int dummy;
char *mynewloc, *proxy;
- struct url *u, *proxy_url;
+ struct url *u = orig_parsed, *proxy_url;
int up_error_code; /* url parse error code */
char *local_file;
int redirection_count = 0;
if (file)
*file = NULL;
- u = url_parse (url, &up_error_code);
- if (!u)
- {
- char *error = url_error (url, up_error_code);
- logprintf (LOG_NOTQUIET, "%s: %s.\n", url, error);
- xfree (url);
- xfree (error);
- return URLERROR;
- }
-
if (!refurl)
refurl = opt.referer;
char *error = url_error (mynewloc, up_error_code);
logprintf (LOG_NOTQUIET, "%s: %s.\n", escnonprint_uri (mynewloc),
error);
- url_free (u);
+ if (orig_parsed != u)
+ {
+ url_free (u);
+ }
xfree (url);
xfree (mynewloc);
xfree (error);
logprintf (LOG_NOTQUIET, _("%d redirections exceeded.\n"),
opt.max_redirect);
url_free (newloc_parsed);
- url_free (u);
+ if (orig_parsed != u)
+ {
+ url_free (u);
+ }
xfree (url);
xfree (mynewloc);
RESTORE_POST_DATA;
xfree (url);
url = mynewloc;
- url_free (u);
+ if (orig_parsed != u)
+ {
+ url_free (u);
+ }
u = newloc_parsed;
/* If we're being redirected from POST, we don't want to POST
else
xfree_null (local_file);
- url_free (u);
+ if (orig_parsed != u)
+ {
+ url_free (u);
+ }
if (redirection_count)
{
if (url_has_scheme (url))
{
- int dt;
+ int dt,url_err;
uerr_t status;
+ struct url * url_parsed = url_parse(url, &url_err);
+
+ if (!url_parsed)
+ {
+ char *error = url_error (url, url_err);
+ logprintf (LOG_NOTQUIET, "%s: %s.\n", url, error);
+ xfree (error);
+ return URLERROR;
+ }
if (!opt.base_href)
opt.base_href = xstrdup (url);
- status = retrieve_url (url, &input_file, NULL, NULL, &dt, false);
+ status = retrieve_url (url_parsed, url, &input_file, NULL, NULL, &dt, false);
if (status != RETROK)
return status;
if (cur_url->url->scheme == SCHEME_FTP)
opt.follow_ftp = 1;
- status = retrieve_tree (cur_url->url->url);
+ status = retrieve_tree (cur_url->url);
opt.follow_ftp = old_follow_ftp;
}
else
- status = retrieve_url (cur_url->url->url, &filename, &new_file, NULL, &dt, opt.recursive);
+ {
+ status = retrieve_url (cur_url->url, cur_url->url->url, &filename,
+ &new_file, NULL, &dt, opt.recursive);
+ }
if (filename && opt.delete_after && file_exists_p (filename))
{
/* Returns true if URL would be downloaded through a proxy. */
bool
- url_uses_proxy (const char *url)
+ url_uses_proxy (struct url * u)
{
bool ret;
- struct url *u = url_parse (url, NULL);
if (!u)
return false;
ret = getproxy (u) != NULL;
- url_free (u);
return ret;
}
else
return sufmatch (no_proxy, host);
}
+
+/* Set the file parameter to point to the local file string. */
+void
+set_local_file (const char **file, const char *default_file)
+{
+ if (opt.output_document)
+ {
+ if (output_stream_regular)
+ *file = opt.output_document;
+ }
+ else
+ *file = default_file;
+}