From 2cf87bea8baf5648745800c4b103ab04c05da9f9 Mon Sep 17 00:00:00 2001
From: hniksic
Date: Tue, 18 Dec 2001 14:20:14 -0800
Subject: [PATCH] [svn] Fix crash introduced by previous patch.

---
 src/ChangeLog | 5 +++++
 src/recur.c   | 7 +------
 2 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/src/ChangeLog b/src/ChangeLog
index 27d06271..dcdab473 100644
--- a/src/ChangeLog
+++ b/src/ChangeLog
@@ -1,3 +1,8 @@
+2001-12-18  Hrvoje Niksic
+
+	* recur.c (retrieve_tree): Make a copy of file obtained from
+	dl_url_file_map because the code calls xfree(file) later.
+
 2001-12-18  Hrvoje Niksic
 
 	* recur.c (register_html): Maintain a hash table of HTML files
diff --git a/src/recur.c b/src/recur.c
index 8930c91d..21054071 100644
--- a/src/recur.c
+++ b/src/recur.c
@@ -228,16 +228,11 @@ retrieve_tree (const char *start_url)
 	 the second time.  */
       if (dl_url_file_map && hash_table_contains (dl_url_file_map, url))
 	{
-	  file = hash_table_get (dl_url_file_map, url);
+	  file = xstrdup (hash_table_get (dl_url_file_map, url));
 
 	  DEBUGP (("Already downloaded \"%s\", reusing it from \"%s\".\n",
 		   url, file));
 
-	  /* #### This check might be horribly slow when downloading
-	     sites with a huge number of HTML docs.  Use a hash table
-	     instead!  Thankfully, it gets tripped only when you use
-	     `wget -r URL1 URL2 ...', as explained above.  */
-
 	  if (string_set_contains (downloaded_html_set, file))
 	    descend = 1;
 	}
-- 
2.39.2