#include <stdio.h>
#include <stdlib.h>
-#ifdef HAVE_UNISTD_H
-# include <unistd.h>
-#endif /* HAVE_UNISTD_H */
+#include <unistd.h>
#include <string.h>
#include <signal.h>
#ifdef ENABLE_NLS
#include "convert.h"
#include "spider.h"
#include "http.h" /* for save_cookies */
+#include "ptimer.h"
#include <getopt.h>
#include <getpass.h>
{ IF_SSL ("certificate-type"), 0, OPT_VALUE, "certificatetype", -1 },
{ IF_SSL ("check-certificate"), 0, OPT_BOOLEAN, "checkcertificate", -1 },
{ "clobber", 0, OPT__CLOBBER, NULL, optional_argument },
+ { "config", 0, OPT_VALUE, "chooseconfig", -1 },
{ "connect-timeout", 0, OPT_VALUE, "connecttimeout", -1 },
{ "continue", 'c', OPT_BOOLEAN, "continue", -1 },
{ "convert-links", 'k', OPT_BOOLEAN, "convertlinks", -1 },
{ "timeout", 'T', OPT_VALUE, "timeout", -1 },
{ "timestamping", 'N', OPT_BOOLEAN, "timestamping", -1 },
{ "tries", 't', OPT_VALUE, "tries", -1 },
+ { "unlink", 0, OPT_BOOLEAN, "unlink", -1 },
+ { "trust-server-names", 0, OPT_BOOLEAN, "trustservernames", -1 },
{ "use-server-timestamps", 0, OPT_BOOLEAN, "useservertimestamps", -1 },
{ "user", 0, OPT_VALUE, "user", -1 },
{ "user-agent", 'U', OPT_VALUE, "useragent", -1 },
N_("\
-B, --base=URL resolves HTML input-file links (-i -F)\n\
relative to URL.\n"),
+ N_("\
+ --config=FILE Specify config file to use.\n"),
"\n",
N_("\
N_("\
--waitretry=SECONDS wait 1..SECONDS between retries of a retrieval.\n"),
N_("\
- --random-wait wait from 0...2*WAIT secs between retrievals.\n"),
+ --random-wait wait from 0.5*WAIT...1.5*WAIT secs between retrievals.\n"),
N_("\
--no-proxy explicitly turn off proxy.\n"),
N_("\
--local-encoding=ENC use ENC as the local encoding for IRIs.\n"),
N_("\
--remote-encoding=ENC use ENC as the default remote encoding.\n"),
+ N_("\
+ --unlink remove file before clobber.\n"),
"\n",
N_("\
N_("\
-I, --include-directories=LIST list of allowed directories.\n"),
N_("\
+ --trust-server-names use the name specified by the redirection\n\
+ url last component.\n"),
+ N_("\
-X, --exclude-directories=LIST list of excluded directories.\n"),
N_("\
-np, --no-parent don't ascend to the parent directory.\n"),
{
char **url, **t;
int i, ret, longindex;
- int nurl, status;
+ int nurl;
bool append_to_log = false;
program_name = argv[0];
+ struct ptimer *timer = ptimer_new ();
+ double start_time = ptimer_measure (timer);
+
i18n_initialize ();
/* Construct the name of the executable, without the directory part. */
windows_main ((char **) &exec_name);
#endif
- /* Set option defaults; read the system wgetrc and ~/.wgetrc. */
- initialize ();
+ /* Load the hard-coded defaults. */
+ defaults ();
init_switches ();
+
+  /* This separate getopt_long is needed to find the user config
+     and parse it before the other user options. */
+ longindex = -1;
+ int retconf;
+ bool use_userconfig = false;
+
+ while ((retconf = getopt_long (argc, argv,
+ short_options, long_options, &longindex)) != -1)
+ {
+ int confval;
+ bool userrc_ret = true;
+ struct cmdline_option *config_opt;
+ confval = long_options[longindex].val;
+ config_opt = &option_data[confval & ~BOOLEAN_NEG_MARKER];
+ if (strcmp (config_opt->long_name, "config") == 0)
+ {
+ userrc_ret &= run_wgetrc (optarg);
+ use_userconfig = true;
+ }
+ if (!userrc_ret)
+ {
+ printf ("Exiting due to error in %s\n", optarg);
+ exit (2);
+ }
+ else
+ break;
+ }
+
+ /* If the user did not specify a config, read the system wgetrc and ~/.wgetrc. */
+ if (use_userconfig == false)
+ initialize ();
+
+ opterr = 0;
+ optind = 0;
+
longindex = -1;
while ((ret = getopt_long (argc, argv,
short_options, long_options, &longindex)) != -1)
short options for convenience and backward
compatibility. */
char *p;
- for (p = optarg; *p; p++)
+ for (p = optarg; p && *p; p++)
switch (*p)
{
case 'v':
signal (SIGWINCH, progress_handle_sigwinch);
#endif
- status = RETROK; /* initialize it, just-in-case */
/* Retrieve the URLs from argument list. */
for (t = url; *t; t++)
{
char *error = url_error (*t, url_err);
logprintf (LOG_NOTQUIET, "%s: %s.\n",*t, error);
xfree (error);
- status = URLERROR;
}
else
{
if (url_scheme (*t) == SCHEME_FTP)
opt.follow_ftp = 1;
- status = retrieve_tree (url_parsed, NULL);
+ retrieve_tree (url_parsed, NULL);
opt.follow_ftp = old_follow_ftp;
}
else
{
- status = retrieve_url (url_parsed, *t, &filename, &redirected_URL,
- NULL, &dt, opt.recursive, iri, true);
+ retrieve_url (url_parsed, *t, &filename, &redirected_URL, NULL,
+ &dt, opt.recursive, iri, true);
}
if (opt.delete_after && file_exists_p(filename))
if (opt.input_filename)
{
int count;
- status = retrieve_from_file (opt.input_filename, opt.force_html, &count);
+ retrieve_from_file (opt.input_filename, opt.force_html, &count);
if (!count)
logprintf (LOG_NOTQUIET, _("No URLs found in %s.\n"),
opt.input_filename);
&&
total_downloaded_bytes != 0)
{
+ double end_time = ptimer_measure (timer);
+ ptimer_destroy (timer);
+
+ char *wall_time = xstrdup (secs_to_human_time (end_time - start_time));
+ char *download_time = xstrdup (secs_to_human_time (total_download_time));
logprintf (LOG_NOTQUIET,
- _("FINISHED --%s--\nDownloaded: %d files, %s in %s (%s)\n"),
- datetime_str (time (NULL)),
- numurls,
- human_readable (total_downloaded_bytes),
- secs_to_human_time (total_download_time),
- retr_rate (total_downloaded_bytes, total_download_time));
+ _("FINISHED --%s--\nTotal wall clock time: %s\n"
+ "Downloaded: %d files, %s in %s (%s)\n"),
+ datetime_str (time (NULL)),
+ wall_time,
+ numurls,
+ human_readable (total_downloaded_bytes),
+ download_time,
+ retr_rate (total_downloaded_bytes, total_download_time));
+ xfree (wall_time);
+ xfree (download_time);
+
/* Print quota warning, if exceeded. */
if (opt.quota && total_downloaded_bytes > opt.quota)
logprintf (LOG_NOTQUIET,