[svn] Fix crash introduced by previous patch.
author    hniksic <devnull@localhost>
Tue, 18 Dec 2001 22:20:14 +0000 (14:20 -0800)
committer hniksic <devnull@localhost>
Tue, 18 Dec 2001 22:20:14 +0000 (14:20 -0800)
src/ChangeLog
src/recur.c

diff --git a/src/ChangeLog b/src/ChangeLog
index 27d062719582fb46d3c6ff001b143735fd873ab6..dcdab47383a19e6d9194c8129edda617af77e3ab 100644
--- a/src/ChangeLog
+++ b/src/ChangeLog
@@ -1,3 +1,8 @@
+2001-12-18  Hrvoje Niksic  <hniksic@arsdigita.com>
+
+       * recur.c (retrieve_tree): Make a copy of file obtained from
+       dl_url_file_map because the code calls xfree(file) later.
+
 2001-12-18  Hrvoje Niksic  <hniksic@arsdigita.com>
 
        * recur.c (register_html): Maintain a hash table of HTML files
diff --git a/src/recur.c b/src/recur.c
index 8930c91d4196769decdbd2cccbfd2505db23eeb8..2105407163bc4464da0ee1e7fe7c616486972e9e 100644
--- a/src/recur.c
+++ b/src/recur.c
@@ -228,16 +228,11 @@ retrieve_tree (const char *start_url)
         the second time.  */
       if (dl_url_file_map && hash_table_contains (dl_url_file_map, url))
        {
-         file = hash_table_get (dl_url_file_map, url);
+         file = xstrdup (hash_table_get (dl_url_file_map, url));
 
          DEBUGP (("Already downloaded \"%s\", reusing it from \"%s\".\n",
                   url, file));
 
-         /* #### This check might be horribly slow when downloading
-            sites with a huge number of HTML docs.  Use a hash table
-            instead!  Thankfully, it gets tripped only when you use
-            `wget -r URL1 URL2 ...', as explained above.  */
-
          if (string_set_contains (downloaded_html_set, file))
            descend = 1;
        }
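
For context, the crash being fixed: the previous patch changed retrieve_tree() to take `file` directly out of dl_url_file_map, but the surrounding code calls xfree (file) once it is done with the string, so it was freeing memory still owned by the hash table. The sketch below is a minimal, self-contained C illustration of that ownership bug and the strdup-style fix; lookup_file() and table_storage are hypothetical stand-ins for hash_table_get() and dl_url_file_map, not wget's actual code.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Hypothetical stand-in for the hash table: it owns the string it
   hands out, so callers must not free the returned pointer.  */
static char *table_storage;

static const char *
lookup_file (void)
{
  return table_storage;         /* borrowed pointer, not a copy */
}

int
main (void)
{
  table_storage = strdup ("index.html");

  /* Pre-patch pattern: the caller later frees a pointer the table
     still owns, leaving table_storage dangling; freeing or using it
     again crashes.

       char *file = (char *) lookup_file ();
       ...
       free (file);  */

  /* Post-patch pattern: duplicate the string so the caller owns a
     private copy it may free safely.  */
  char *file = strdup (lookup_file ());
  printf ("Already downloaded, reusing \"%s\".\n", file);
  free (file);                  /* frees only the copy */

  free (table_storage);         /* the table's storage is still valid */
  return 0;
}

The patch applies the same idea with wget's xstrdup(), which wraps strdup with out-of-memory checking.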