X-Git-Url: http://sjero.net/git/?p=wget;a=blobdiff_plain;f=src%2Foptions.h;h=e107fb09987a1a9cbf1dec15537102cc475ba1d8;hp=34e9991cbfa95595b3b578f708078e75f05bcabb;hb=ad75dd2662b11cb806ed92ba194dd04f85c68ba2;hpb=b2be7522c745827b521a8ed535201427df32eec7

diff --git a/src/options.h b/src/options.h
index 34e9991c..e107fb09 100644
--- a/src/options.h
+++ b/src/options.h
@@ -35,6 +35,7 @@ struct options
   int verbose;                  /* Are we verbose? */
   int quiet;                    /* Are we quiet? */
   int ntry;                     /* Number of tries per URL */
+  int retry_connrefused;        /* Treat CONNREFUSED as non-fatal. */
   int background;               /* Whether we should work in background. */
   int kill_longer;              /* Do we reject messages with *more*
                                    data than specified in
@@ -69,6 +70,7 @@ struct options
 
   char **domains;               /* See host.c */
   char **exclude_domains;
+  int dns_cache;                /* whether we cache DNS lookups. */
 
   char **follow_tags;           /* List of HTML tags to recursively follow. */
   char **ignore_tags;           /* List of HTML tags to ignore if recursing. */
@@ -105,21 +107,20 @@ struct options
   char *progress_type;          /* progress indicator type. */
   char *proxy_user; /*oli*/
   char *proxy_passwd;
-#ifdef HAVE_SELECT
-  long timeout;                 /* The value of read timeout in
-                                   seconds. */
-#endif
+
+  double read_timeout;          /* The read/write timeout. */
+  double dns_timeout;           /* The DNS timeout. */
+  double connect_timeout;       /* The connect timeout. */
+
   int random_wait;              /* vary from 0 .. wait secs by random()? */
-  long wait;                    /* The wait period between retrievals. */
-  long waitretry;               /* The wait period between retries. - HEH */
+  double wait;                  /* The wait period between retrievals. */
+  double waitretry;             /* The wait period between retries. - HEH */
   int use_robots;               /* Do we heed robots.txt? */
 
   long limit_rate;              /* Limit the download rate to this
                                    many bps. */
-  long quota;                   /* Maximum number of bytes to
-                                   retrieve. */
-  VERY_LONG_TYPE downloaded;    /* How much we downloaded already. */
-  int downloaded_overflow;      /* Whether the above overflowed. */
+  LARGE_INT quota;              /* Maximum file size to download and
+                                   store. */
   int numurls;                  /* Number of successfully downloaded
                                    URLs */
 
@@ -127,9 +128,9 @@ struct options
   int save_headers;             /* Do we save headers together with
                                    file? */
 
-#ifdef DEBUG
+#ifdef ENABLE_DEBUG
   int debug;                    /* Debugging on/off */
-#endif /* DEBUG */
+#endif
 
   int timestamping;             /* Whether to use time-stamping. */
 
@@ -176,12 +177,28 @@ struct options
   int sslprotocol;              /* 0 = auto / 1 = v2 / 2 = v3 / 3 = TLSv1 */
 #endif /* HAVE_SSL */
 
-  int cookies;
-  char *cookies_input;
-  char *cookies_output;
+  int cookies;                  /* whether cookies are used. */
+  char *cookies_input;          /* file we're loading the cookies from. */
+  char *cookies_output;         /* file we're saving the cookies to. */
+  int keep_session_cookies;     /* whether session cookies should be
+                                   saved and loaded. */
 
   char *post_data;              /* POST query string */
   char *post_file_name;         /* File to post */
+
+  enum {
+    restrict_unix,
+    restrict_windows
+  } restrict_files_os;          /* file name restriction ruleset. */
+  int restrict_files_ctrl;      /* non-zero if control chars in URLs
+                                   are restricted from appearing in
+                                   generated file names. */
+
+  int strict_comments;          /* whether strict SGML comments are
+                                   enforced. */
+
+  int preserve_perm;            /* whether remote permissions are used
+                                   or that what is set by umask. */
 };
 
 extern struct options opt;