/* Conversion of links to local files.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GNU Wget.

GNU Wget is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3 of the License, or
(at your option) any later version.

GNU Wget is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with Wget.  If not, see <http://www.gnu.org/licenses/>.

In addition, as a special exception, the Free Software Foundation
gives permission to link the code of its release of Wget with the
OpenSSL project's "OpenSSL" library (or with modified versions of it
that use the same license as the "OpenSSL" library), and distribute
the linked executables.  You must obey the GNU General Public License
in all respects for all of the code used other than "OpenSSL".  If you
modify this file, you may extend this exception to your version of the
file, but you are not obligated to do so.  If you do not wish to do
so, delete this exception statement from your version.  */
#ifdef HAVE_UNISTD_H
# include <unistd.h>
#endif /* HAVE_UNISTD_H */
static struct hash_table *dl_file_url_map;
struct hash_table *dl_url_file_map;

/* Set of HTML files downloaded in this Wget run, used for link
   conversion after Wget is done.  */
struct hash_table *downloaded_html_set;

static void convert_links (const char *, struct urlpos *);
/* This function is called when the retrieval is done to convert the
   links that have been downloaded.  It has to be called at the end of
   the retrieval, because only then does Wget know conclusively which
   URLs have been downloaded, and which not, so it can tell which
   direction to convert to.

   The "direction" means that the URLs to the files that have been
   downloaded get converted to the relative URL which will point to
   that file.  And the other URLs get converted to the remote URL on
   the server.

   All the downloaded HTMLs are kept in downloaded_html_set, and
   downloaded URLs in dl_url_file_map.  All the information is
   extracted from these two tables.  */
void
convert_all_links (void)

  struct ptimer *timer = ptimer_new ();

  if (downloaded_html_set)
    cnt = hash_table_count (downloaded_html_set);

  file_array = alloca_array (char *, cnt);
  string_set_to_array (downloaded_html_set, file_array);

  for (i = 0; i < cnt; i++)
      struct urlpos *urls, *cur_url;

      char *file = file_array[i];

      /* Determine the URL of the HTML file.  get_urls_html will need
         it.  */
      url = hash_table_get (dl_file_url_map, file);

          DEBUGP (("Apparently %s has been removed.\n", file));

      DEBUGP (("Scanning %s (from %s)\n", file, url));

      /* Parse the HTML file...  */
      urls = get_urls_html (file, url, NULL);

      /* We don't respect meta_disallow_follow here because, even if
         the file is not followed, we might still want to convert the
         links that have been followed from other files.  */
      for (cur_url = urls; cur_url; cur_url = cur_url->next)

          struct url *u = cur_url->url;

          if (cur_url->link_base_p)

              /* Base references have been resolved by our parser, so
                 we turn the base URL into an empty string.  (Perhaps
                 we should remove the tag entirely?)  */
              cur_url->convert = CO_NULLIFY_BASE;

          /* We decide the direction of conversion according to whether
             a URL was downloaded.  Downloaded URLs will be converted
             ABS2REL, whereas non-downloaded will be converted REL2ABS.  */
          local_name = hash_table_get (dl_url_file_map, u->url);

          /* Decide on the conversion type.  */

              /* We've downloaded this URL.  Convert it to relative
                 form.  We do this even if the URL already is in
                 relative form, because our directory structure may
                 not be identical to that on the server (think `-nd',
                 `--cut-dirs', etc.)  */
              cur_url->convert = CO_CONVERT_TO_RELATIVE;
              cur_url->local_name = xstrdup (local_name);
              DEBUGP (("will convert url %s to local %s\n", u->url, local_name));
              /* We haven't downloaded this URL.  If it's not already
                 complete (including a full host name), convert it to
                 that form, so it can be reached while browsing this
                 document locally.  */
              if (!cur_url->link_complete_p)
                cur_url->convert = CO_CONVERT_TO_COMPLETE;
              cur_url->local_name = NULL;
              DEBUGP (("will convert url %s to complete\n", u->url));
      /* Convert the links in the file.  */
      convert_links (file, urls);

  secs = ptimer_measure (timer);
  ptimer_destroy (timer);
  logprintf (LOG_VERBOSE, _("Converted %d files in %s seconds.\n"),
             file_count, print_decimal (secs));
static void write_backup_file (const char *, downloaded_file_t);
static const char *replace_attr (const char *, int, FILE *, const char *);
static const char *replace_attr_refresh_hack (const char *, int, FILE *,
                                              const char *, int);
static char *local_quote_string (const char *);
static char *construct_relative (const char *, const char *);
/* Change the links in one HTML file.  LINKS is a list of links in the
   document, along with their positions and the desired direction of
   the conversion.  */
static void
convert_links (const char *file, struct urlpos *links)
  struct file_memory *fm;

  downloaded_file_t downloaded_file_return;

  int to_url_count = 0, to_file_count = 0;
  logprintf (LOG_VERBOSE, _("Converting %s... "), file);

  /* First we do a "dry run": go through the list of links and see
     whether any URL needs to be converted in the first place.  If
     not, just leave the file alone.  */

  for (dry = links; dry; dry = dry->next)
    if (dry->convert != CO_NOCONVERT)

      logputs (LOG_VERBOSE, _("nothing to do.\n"));
  fm = read_file (file);

      logprintf (LOG_NOTQUIET, _("Cannot convert links in %s: %s\n"),
                 file, strerror (errno));

  downloaded_file_return = downloaded_file (CHECK_FOR_FILE, file);
  if (opt.backup_converted && downloaded_file_return)
    write_backup_file (file, downloaded_file_return);
  /* Before opening the file for writing, unlink the file.  This is
     important if the data in FM is mmapped.  In such a case, nulling
     the file, which is what fopen() below does, would make us read
     all zeroes from the mmapped region.  */
  if (unlink (file) < 0 && errno != ENOENT)

      logprintf (LOG_NOTQUIET, _("Unable to delete `%s': %s\n"),
                 file, strerror (errno));

  /* Now open the file for writing.  */
  fp = fopen (file, "wb");

      logprintf (LOG_NOTQUIET, _("Cannot convert links in %s: %s\n"),
                 file, strerror (errno));
  /* Here we loop through all the URLs in file, replacing those of
     them that are downloaded with relative references.  */

  for (link = links; link; link = link->next)

      char *url_start = fm->content + link->pos;

      if (link->pos >= fm->length)

          DEBUGP (("Something strange is going on.  Please investigate."));

      /* If the URL is not to be converted, skip it.  */
      if (link->convert == CO_NOCONVERT)

          DEBUGP (("Skipping %s at position %d.\n", link->url->url, link->pos));

      /* Echo the file contents, up to the offending URL's opening
         quote, to the outfile.  */
      fwrite (p, 1, url_start - p, fp);
      switch (link->convert)

        case CO_CONVERT_TO_RELATIVE:
          /* Convert absolute URL to relative.  */

            char *newname = construct_relative (file, link->local_name);
            char *quoted_newname = local_quote_string (newname);

            if (!link->link_refresh_p)
              p = replace_attr (p, link->size, fp, quoted_newname);
            else
              p = replace_attr_refresh_hack (p, link->size, fp, quoted_newname,
                                             link->refresh_timeout);

            DEBUGP (("TO_RELATIVE: %s to %s at position %d in %s.\n",
                     link->url->url, newname, link->pos, file));

            xfree (quoted_newname);

        case CO_CONVERT_TO_COMPLETE:
          /* Convert the link to absolute URL.  */

            char *newlink = link->url->url;
            char *quoted_newlink = html_quote_string (newlink);

            if (!link->link_refresh_p)
              p = replace_attr (p, link->size, fp, quoted_newlink);
            else
              p = replace_attr_refresh_hack (p, link->size, fp, quoted_newlink,
                                             link->refresh_timeout);

            DEBUGP (("TO_COMPLETE: <something> to %s at position %d in %s.\n",
                     newlink, link->pos, file));
            xfree (quoted_newlink);
        case CO_NULLIFY_BASE:
          /* Change the base href to "".  */
          p = replace_attr (p, link->size, fp, "");

  /* Output the rest of the file.  */
  if (p - fm->content < fm->length)
    fwrite (p, 1, fm->length - (p - fm->content), fp);

  logprintf (LOG_VERBOSE, "%d-%d\n", to_file_count, to_url_count);
/* Construct and return a link that points from BASEFILE to LINKFILE.
   Both files should be local file names, BASEFILE of the referring
   file, and LINKFILE of the referred file.

   Examples:

   cr("foo", "bar")            -> "bar"
   cr("A/foo", "A/bar")        -> "bar"
   cr("A/foo", "A/B/bar")      -> "B/bar"
   cr("A/X/foo", "A/Y/bar")    -> "../Y/bar"
   cr("X/", "Y/bar")           -> "../Y/bar"  (trailing slash does matter in BASE)

   Both files should be absolute or relative, otherwise strange
   results might ensue.  The function makes no special efforts to
   handle "." and ".." in links, so make sure they're not there
   (e.g. using path_simplify).  */
static char *
construct_relative (const char *basefile, const char *linkfile)

  /* First, skip the initial directory components common to both
     files.  */
  for (b = basefile, l = linkfile; *b == *l && *b != '\0'; ++b, ++l)

        start = (b - basefile) + 1;

  /* With common directories out of the way, the situation we have is
     as follows:

       b - b1/b2/[...]/bfile
       l - l1/l2/[...]/lfile

     The link we're constructing needs to be:

       lnk - ../../l1/l2/[...]/lfile

     Where the number of ".."'s equals the number of bN directory
     components in B.  */

  /* Count the directory components in B.  */
  for (b = basefile; *b; b++)

  /* Construct LINK as explained above.  */
  link = xmalloc (3 * basedirs + strlen (linkfile) + 1);
  for (i = 0; i < basedirs; i++)
    memcpy (link + 3 * i, "../", 3);
  strcpy (link + 3 * i, linkfile);
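/* Illustrative sketch (not part of the original file): exercising
   construct_relative on two of the examples from the comment above.
   The wrapper function is hypothetical; the expected strings follow
   directly from that comment.  */
#if 0
static void
construct_relative_examples (void)
{
  char *rel;

  /* Referrer and target share the "A/" prefix and sit in the same
     directory, so the link is simply "bar".  */
  rel = construct_relative ("A/foo", "A/bar");
  assert (0 == strcmp (rel, "bar"));
  xfree (rel);

  /* One directory ("X/") has to be climbed before descending into
     "Y/", hence the single "../".  */
  rel = construct_relative ("A/X/foo", "A/Y/bar");
  assert (0 == strcmp (rel, "../Y/bar"));
  xfree (rel);
}
#endif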
/* Used by write_backup_file to remember which files have been
   converted.  */
static struct hash_table *converted_files;
static void
write_backup_file (const char *file, downloaded_file_t downloaded_file_return)

  /* Rather than just writing over the original .html file with the
     converted version, save the former to *.orig.  Note we only do
     this for files we've _successfully_ downloaded, so we don't
     clobber .orig files sitting around from previous invocations.  */

  /* Construct the backup filename as the original name plus ".orig".  */
  size_t filename_len = strlen (file);
  char *filename_plus_orig_suffix;

  if (downloaded_file_return == FILE_DOWNLOADED_AND_HTML_EXTENSION_ADDED)

      /* Just write "orig" over "html".  We need to do it this way
         because when we're checking to see if we've downloaded the
         file before (to see if we can skip downloading it), we don't
         know if it's a text/html file.  Therefore we don't know yet
         at that stage that -E is going to cause us to tack on
         ".html", so we need to compare vs. the original URL plus
         ".orig", not the original URL plus ".html.orig".  */
      filename_plus_orig_suffix = alloca (filename_len + 1);
      strcpy (filename_plus_orig_suffix, file);
      strcpy ((filename_plus_orig_suffix + filename_len) - 4, "orig");

  else /* downloaded_file_return == FILE_DOWNLOADED_NORMALLY */

      /* Append ".orig" to the name.  */
      filename_plus_orig_suffix = alloca (filename_len + sizeof (".orig"));
      strcpy (filename_plus_orig_suffix, file);
      strcpy (filename_plus_orig_suffix + filename_len, ".orig");
  if (!converted_files)
    converted_files = make_string_hash_table (0);

  /* We can get called twice on the same URL thanks to the
     convert_all_links() call in main().  If we write the .orig file
     each time in such a case, it'll end up containing the first-pass
     conversion, not the original file.  So, see if we've already been
     called on this file.  */
  if (!string_set_contains (converted_files, file))

      /* Rename <file> to <file>.orig before former gets written over.  */
      if (rename (file, filename_plus_orig_suffix) != 0)
        logprintf (LOG_NOTQUIET, _("Cannot back up %s as %s: %s\n"),
                   file, filename_plus_orig_suffix, strerror (errno));

      /* Remember that we've already written a .orig backup for this file.
         Note that we never free this memory since we need it till the
         convert_all_links() call, which is one of the last things the
         program does before terminating.  BTW, I'm not sure if it would be
         safe to just set 'converted_file_ptr->string' to 'file' below,
         rather than making a copy of the string...  Another note is that I
         thought I could just add a field to the urlpos structure saying
         that we'd written a .orig file for this URL, but that didn't work,
         so I had to make this separate list.
         -- Dan Harkless <wget@harkless.org>

         This [adding a field to the urlpos structure] didn't work
         because convert_file() is called from convert_all_links at
         the end of the retrieval with a freshly built new urlpos
         list.
         -- Hrvoje Niksic <hniksic@xemacs.org>  */
      string_set_add (converted_files, file);
static bool find_fragment (const char *, int, const char **, const char **);

/* Replace an attribute's original text with NEW_TEXT.  */

static const char *
replace_attr (const char *p, int size, FILE *fp, const char *new_text)

  bool quote_flag = false;
  char quote_char = '\"';       /* use "..." for quoting, unless the
                                   original value is quoted, in which
                                   case reuse its quoting char.  */
  const char *frag_beg, *frag_end;

  /* Structure of our string is:
       "...old-contents..."
       <---    size    --->  (with quotes)
     or:
       ...old-contents...
       <---    size   -->    (no quotes)   */
  if (*p == '\"' || *p == '\'')

      size -= 2;                /* disregard opening and closing quote */

  putc (quote_char, fp);
  fputs (new_text, fp);

  /* Look for fragment identifier, if any.  */
  if (find_fragment (p, size, &frag_beg, &frag_end))
    fwrite (frag_beg, 1, frag_end - frag_beg, fp);

  putc (quote_char, fp);
/* The same as REPLACE_ATTR, but used when replacing
   <meta http-equiv=refresh content="new_text"> because we need to
   prepend "timeout_value; URL=" to the new_text.  */

static const char *
replace_attr_refresh_hack (const char *p, int size, FILE *fp,
                           const char *new_text, int timeout)

  /* Room for the timeout digits, "; URL=", the new text, and '\0'.  */
  char *new_with_timeout = (char *)alloca (numdigit (timeout)
                                           + 6 /* "; URL=" */
                                           + strlen (new_text)
                                           + 1);
  sprintf (new_with_timeout, "%d; URL=%s", timeout, new_text);

  return replace_attr (p, size, fp, new_with_timeout);
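/* Illustrative sketch (not part of the original file): the string the
   sprintf above builds for a hypothetical refresh timeout of 3 and a
   converted target of "foo.html".  */
#if 0
static void
refresh_hack_example (void)
{
  char buf[64];

  /* The rewritten attribute value reads "3; URL=foo.html", i.e. the
     tag becomes <meta http-equiv="refresh" content="3; URL=foo.html">.  */
  sprintf (buf, "%d; URL=%s", 3, "foo.html");
  assert (0 == strcmp (buf, "3; URL=foo.html"));
}
#endif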
/* Find the first occurrence of '#' in [BEG, BEG+SIZE) that is not
   preceded by '&'.  If the character is not found, return false.  If
   the character is found, return true and set BP and EP to point to
   the beginning and end of the region.

   This is used for finding the fragment identifiers in URLs.  */

static bool
find_fragment (const char *beg, int size, const char **bp, const char **ep)

  const char *end = beg + size;
  bool saw_amp = false;
  for (; beg < end; beg++)
/* Quote FILE for use as local reference to an HTML file.

   We quote ? as %3F to avoid passing part of the file name as the
   parameter when browsing the converted file through HTTP.  However,
   it is safe to do this only when `--html-extension' is turned on.
   This is because converting "index.html?foo=bar" to
   "index.html%3Ffoo=bar" would break local browsing, as the latter
   isn't even recognized as an HTML file!  However, converting
   "index.html?foo=bar.html" to "index.html%3Ffoo=bar.html" should be
   safe for both local and HTTP-served browsing.

   We always quote "#" as "%23" and "%" as "%25" because those
   characters have special meanings in URLs.  */
static char *
local_quote_string (const char *file)

  char *any = strpbrk (file, "?#%");
  if (!any)
    return html_quote_string (file);

  /* Allocate space assuming the worst-case scenario, each character
     having to be quoted.  */
  to = newname = (char *)alloca (3 * strlen (file) + 1);
  for (from = file; *from; from++)

      if (opt.html_extension)

  return html_quote_string (newname);
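/* Illustrative sketch (not part of the original file): what the quoting
   above yields for a hypothetical "index.html?foo=bar#top", assuming
   `--html-extension' (opt.html_extension) is in effect so that '?' may
   safely become %3F; '#' always becomes %23.  */
#if 0
static void
local_quote_example (void)
{
  char *quoted = local_quote_string ("index.html?foo=bar#top");
  /* Expected result: "index.html%3Ffoo=bar%23top", which a browser
     opening the local copy no longer splits into query and fragment.  */
  xfree (quoted);
}
#endif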
/* Book-keeping code for dl_file_url_map, dl_url_file_map,
   downloaded_html_list, and downloaded_html_set.  Other code calls
   these functions to let us know that a file has been downloaded.  */
#define ENSURE_TABLES_EXIST do {                        \
  if (!dl_file_url_map)                                 \
    dl_file_url_map = make_string_hash_table (0);       \
  if (!dl_url_file_map)                                 \
    dl_url_file_map = make_string_hash_table (0);       \
} while (0)
/* Return true if S1 and S2 are the same, except for "/index.html".
   The cases in which it returns true are (substitute any substring
   for "foo"):

   m("foo/index.html", "foo/")  ==> 1
   m("foo/", "foo/index.html")  ==> 1
   m("foo", "foo/index.html")   ==> 1
   m("foo", "foo/")             ==> 1
   m("foo", "foo")              ==> 1  */
static bool
match_except_index (const char *s1, const char *s2)

  /* Skip common substring.  */
  for (i = 0; *s1 && *s2 && *s1 == *s2; s1++, s2++, i++)

  /* Strings differ at the very beginning -- bail out.  We need to
     check this explicitly to avoid `lng - 1' reading outside the
     array.  */

  /* Both strings hit EOF -- strings are equal.  */

  /* Strings are randomly different, e.g. "/foo/bar" and "/foo/qux".  */

  /* S1 is the longer one.  */

  /* S2 is the longer one.  */

  /* foo/index.html */ /* or */ /* foo/index.html */

  /* The right-hand case.  */
  if (*lng == '/' && *(lng + 1) == '\0')

  return 0 == strcmp (lng, "/index.html");
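/* Illustrative sketch (not part of the original file): the equivalence
   match_except_index establishes between a directory URL and its
   "index.html" form, using the server name from the comment in
   register_download below.  */
#if 0
static void
match_except_index_example (void)
{
  assert (match_except_index ("http://www.server.com/",
                              "http://www.server.com/index.html"));
  assert (!match_except_index ("http://www.server.com/a.html",
                               "http://www.server.com/b.html"));
}
#endif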
static int
dissociate_urls_from_file_mapper (void *key, void *value, void *arg)

  char *mapping_url = (char *)key;
  char *mapping_file = (char *)value;
  char *file = (char *)arg;

  if (0 == strcmp (mapping_file, file))

      hash_table_remove (dl_url_file_map, mapping_url);

      xfree (mapping_file);

  /* Continue mapping.  */
  return 0;
/* Remove all associations from various URLs to FILE from dl_url_file_map.  */

static void
dissociate_urls_from_file (const char *file)

  /* Can't use hash_table_iter_* because the table mutates while mapping.  */
  hash_table_for_each (dl_url_file_map, dissociate_urls_from_file_mapper,
                       (char *) file);
/* Register that URL has been successfully downloaded to FILE.  This
   is used by the link conversion code to convert references to URLs
   to references to local files.  It is also being used to check if a
   URL has already been downloaded.  */

void
register_download (const char *url, const char *file)

  char *old_file, *old_url;
  /* With some forms of retrieval the same FILE can be reused for a
     different URL, although that is neither likely nor particularly
     desirable.  If both are downloaded, the second download overrides
     the first one.  When that happens, dissociate the old file name
     from the URL.  */
  if (hash_table_get_pair (dl_file_url_map, file, &old_file, &old_url))

      if (0 == strcmp (url, old_url))
        /* We have somehow managed to download the same URL twice.
           Nothing to do.  */
        return;

      if (match_except_index (url, old_url)
          && !hash_table_contains (dl_url_file_map, url))
        /* The two URLs differ only in the "index.html" ending.  For
           example, one is "http://www.server.com/", and the other is
           "http://www.server.com/index.html".  Don't remove the old
           one, just add the new one as a non-canonical entry.  */

      hash_table_remove (dl_file_url_map, file);
      /* Remove all the URLs that point to this file.  Yes, there can
         be more than one such URL, because we store redirections as
         multiple entries in dl_url_file_map.  For example, if URL1
         redirects to URL2 which gets downloaded to FILE, we map both
         URL1 and URL2 to FILE in dl_url_file_map.  (dl_file_url_map
         only points to URL2.)  When another URL gets loaded to FILE,
         we want both URL1 and URL2 dissociated from it.

         This is a relatively expensive operation because it performs
         a linear search of the whole hash table, but it should be
         called very rarely, only when two URLs resolve to the same
         file name, *and* the "<file>.1" extensions are turned off.
         In other words, almost never.  */
      dissociate_urls_from_file (file);

  hash_table_put (dl_file_url_map, xstrdup (file), xstrdup (url));
  /* A URL->FILE mapping is not possible without a FILE->URL mapping.
     If the latter were present, it should have been removed by the
     above `if'.  So we could write:

         assert (!hash_table_contains (dl_url_file_map, url));

     The above is correct when running in recursive mode where the
     same URL always resolves to the same file.  But if you do
     something like:

         wget URL URL

     then the first URL will resolve to "FILE", and the other to
     "FILE.1".  In that case, FILE.1 will not be found in
     dl_file_url_map, but URL will still point to FILE in
     dl_url_file_map.  */
  if (hash_table_get_pair (dl_url_file_map, url, &old_url, &old_file))

      hash_table_remove (dl_url_file_map, url);

  hash_table_put (dl_url_file_map, xstrdup (url), xstrdup (file));
/* Register that FROM has been redirected to TO.  This assumes that TO
   is successfully downloaded and already registered using
   register_download() above.  */

void
register_redirection (const char *from, const char *to)

  file = hash_table_get (dl_url_file_map, to);
  assert (file != NULL);
  if (!hash_table_contains (dl_url_file_map, from))
    hash_table_put (dl_url_file_map, xstrdup (from), xstrdup (file));
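/* Illustrative sketch (not part of the original file): the mappings the
   two registration functions above leave behind for a hypothetical
   redirect of http://example.com/old to http://example.com/new, saved
   locally as "example.com/new.html".  */
#if 0
static void
register_example (void)
{
  register_download ("http://example.com/new", "example.com/new.html");
  register_redirection ("http://example.com/old", "http://example.com/new");

  /* dl_url_file_map now maps both URLs to the local file, while
     dl_file_url_map keeps only the canonical (final) URL.  */
  assert (0 == strcmp (hash_table_get (dl_url_file_map,
                                       "http://example.com/old"),
                       "example.com/new.html"));
}
#endif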
/* Register that the file has been deleted.  */

void
register_delete_file (const char *file)

  char *old_url, *old_file;

  if (!hash_table_get_pair (dl_file_url_map, file, &old_file, &old_url))
    return;

  hash_table_remove (dl_file_url_map, file);

  dissociate_urls_from_file (file);

/* Register that FILE is an HTML file that has been downloaded.  */

void
register_html (const char *url, const char *file)

  if (!downloaded_html_set)
    downloaded_html_set = make_string_hash_table (0);
  string_set_add (downloaded_html_set, file);
static void downloaded_files_free (void);

/* Cleanup the data structures associated with this file.  */

void
convert_cleanup (void)

  free_keys_and_values (dl_file_url_map);
  hash_table_destroy (dl_file_url_map);
  dl_file_url_map = NULL;

  free_keys_and_values (dl_url_file_map);
  hash_table_destroy (dl_url_file_map);
  dl_url_file_map = NULL;

  if (downloaded_html_set)
    string_set_free (downloaded_html_set);
  downloaded_files_free ();

  string_set_free (converted_files);
/* Book-keeping code for downloaded files that enables extension
   hacks.  */

/* This table should really be merged with dl_file_url_map and
   downloaded_html_files.  This was originally a list, but I changed
   it to a hash table because it was actually taking a lot of time to
   find things in it.  */

static struct hash_table *downloaded_files_hash;
/* We're storing "modes" of type downloaded_file_t in the hash table.
   However, our hash tables only accept pointers for keys and values.
   So when we need a pointer, we use the address of a
   downloaded_file_t variable of static storage.  */

static downloaded_file_t *
downloaded_mode_to_ptr (downloaded_file_t mode)

  static downloaded_file_t
    v1 = FILE_NOT_ALREADY_DOWNLOADED,
    v2 = FILE_DOWNLOADED_NORMALLY,
    v3 = FILE_DOWNLOADED_AND_HTML_EXTENSION_ADDED,

  switch (mode)

    case FILE_NOT_ALREADY_DOWNLOADED:
      return &v1;
    case FILE_DOWNLOADED_NORMALLY:
      return &v2;
    case FILE_DOWNLOADED_AND_HTML_EXTENSION_ADDED:
      return &v3;
/* Remembers which files have been downloaded.  In the standard case,
   should be called with mode == FILE_DOWNLOADED_NORMALLY for each
   file we actually download successfully (i.e. not for ones we have
   failures on or that we skip due to -N).

   When we've downloaded a file and tacked on a ".html" extension due
   to -E, call this function with
   FILE_DOWNLOADED_AND_HTML_EXTENSION_ADDED rather than
   FILE_DOWNLOADED_NORMALLY.

   If you just want to check if a file has been previously added
   without adding it, call with mode == CHECK_FOR_FILE.  Please be
   sure to call this function with local filenames, not remote
   URLs.  */

downloaded_file_t
downloaded_file (downloaded_file_t mode, const char *file)
  downloaded_file_t *ptr;

  if (mode == CHECK_FOR_FILE)

      if (!downloaded_files_hash)
        return FILE_NOT_ALREADY_DOWNLOADED;
      ptr = hash_table_get (downloaded_files_hash, file);
      if (!ptr)
        return FILE_NOT_ALREADY_DOWNLOADED;

  if (!downloaded_files_hash)
    downloaded_files_hash = make_string_hash_table (0);

  ptr = hash_table_get (downloaded_files_hash, file);

  ptr = downloaded_mode_to_ptr (mode);
  hash_table_put (downloaded_files_hash, xstrdup (file), ptr);

  return FILE_NOT_ALREADY_DOWNLOADED;
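/* Illustrative sketch (not part of the original file): the calling
   pattern described in the comment above, with a made-up file name.
   The expected return values assume the elided parts of
   downloaded_file behave as that comment describes.  */
#if 0
static void
downloaded_file_example (void)
{
  /* Nothing recorded yet.  */
  assert (downloaded_file (CHECK_FOR_FILE, "foo.html")
          == FILE_NOT_ALREADY_DOWNLOADED);

  /* Record a successful download.  */
  downloaded_file (FILE_DOWNLOADED_NORMALLY, "foo.html");

  /* Later checks report how the file was downloaded, without
     re-registering it.  */
  assert (downloaded_file (CHECK_FOR_FILE, "foo.html")
          == FILE_DOWNLOADED_NORMALLY);
}
#endif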
static void
downloaded_files_free (void)

  if (downloaded_files_hash)

      hash_table_iterator iter;
      for (hash_table_iterate (downloaded_files_hash, &iter);
           hash_table_iter_next (&iter);

      hash_table_destroy (downloaded_files_hash);
      downloaded_files_hash = NULL;
/* The function returns the pointer to the malloc-ed quoted version of
   string s.  It will recognize and quote numeric and special graphic
   entities, as per RFC1866:

   `&' -> `&amp;'
   `<' -> `&lt;'
   `>' -> `&gt;'
   `"' -> `&quot;'
   SP  -> `&#32;'

   No other entities are recognized or replaced.  */
char *
html_quote_string (const char *s)

  /* Pass through the string, and count the new size.  */
  for (i = 0; *s; s++, i++)

      else if (*s == '<' || *s == '>')
        i += 3;                 /* `lt;' and `gt;' */
      else if (*s == '\"')
        i += 5;                 /* `quot;' */

  res = xmalloc (i + 1);

  for (p = res; *s; s++)

          *p++ = (*s == '<' ? 'l' : 'g');
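/* Illustrative sketch (not part of the original file): the entity
   quoting performed by html_quote_string on a made-up string.  */
#if 0
static void
html_quote_example (void)
{
  char *q = html_quote_string ("a<b&c");
  /* Expected result: "a&lt;b&amp;c" -- '<' becomes &lt; and '&'
     becomes &amp;, as listed in the comment above the function.  */
  assert (0 == strcmp (q, "a&lt;b&amp;c"));
  xfree (q);
}
#endif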