===================================================================
RCS file: /home/cvs/OpenXM_contrib2/asir2000/gc/alloc.c,v
retrieving revision 1.1.1.1
retrieving revision 1.3
diff -u -p -r1.1.1.1 -r1.3
--- OpenXM_contrib2/asir2000/gc/alloc.c	1999/12/03 07:39:09	1.1.1.1
+++ OpenXM_contrib2/asir2000/gc/alloc.c	2000/12/01 09:26:10	1.3
@@ -1,7 +1,8 @@
 /*
  * Copyright 1988, 1989 Hans-J. Boehm, Alan J. Demers
- * Copyright (c) 1991-1994 by Xerox Corporation.  All rights reserved.
+ * Copyright (c) 1991-1996 by Xerox Corporation.  All rights reserved.
  * Copyright (c) 1998 by Silicon Graphics.  All rights reserved.
+ * Copyright (c) 1999 by Hewlett-Packard Company. All rights reserved.
  *
  * THIS MATERIAL IS PROVIDED AS IS, WITH ABSOLUTELY NO WARRANTY EXPRESSED
  * OR IMPLIED. ANY USE IS AT YOUR OWN RISK.
@@ -64,9 +65,15 @@ word GC_gc_no = 0;
 int GC_incremental = 0;    /* By default, stop the world.      */
 #endif
 
-int GC_full_freq = 4;      /* Every 5th collection is a full   */
-                           /* collection.                      */
+int GC_full_freq = 19;     /* Every 20th collection is a full  */
+                           /* collection, whether we need it   */
+                           /* or not.                          */
+GC_bool GC_need_full_gc = FALSE;
+                           /* Need full GC do to heap growth.  */
+
+word GC_used_heap_size_after_full = 0;
+
 
 char * GC_copyright[] =
 {"Copyright 1988,1989 Hans-J. Boehm and Alan J. Demers ",
 "Copyright (c) 1991-1995 by Xerox Corporation.  All rights reserved. ",
@@ -213,6 +220,7 @@ GC_bool GC_should_collect()
     return(GC_adj_words_allocd() >= min_words_allocd());
 }
 
+
 void GC_notify_full_gc()
 {
     if (GC_start_call_back != (void (*)())0) {
@@ -220,6 +228,8 @@ void GC_notify_full_gc()
     }
 }
 
+GC_bool GC_is_full_gc = FALSE;
+
 /*
  * Initiate a garbage collection if appropriate.
  * Choose judiciously
@@ -229,7 +239,6 @@ void GC_notify_full_gc()
 void GC_maybe_gc()
 {
     static int n_partial_gcs = 0;
-    GC_bool is_full_gc = FALSE;
 
     if (GC_should_collect()) {
         if (!GC_incremental) {
@@ -237,7 +246,7 @@ void GC_maybe_gc()
             GC_gcollect_inner();
             n_partial_gcs = 0;
             return;
-        } else if (n_partial_gcs >= GC_full_freq) {
+        } else if (GC_need_full_gc || n_partial_gcs >= GC_full_freq) {
 #           ifdef PRINTSTATS
               GC_printf2(
                 "***>Full mark for collection %lu after %ld allocd bytes\n",
@@ -249,7 +258,7 @@ void GC_maybe_gc()
             GC_clear_marks();
             n_partial_gcs = 0;
             GC_notify_full_gc();
-            is_full_gc = TRUE;
+            GC_is_full_gc = TRUE;
         } else {
             n_partial_gcs++;
         }
@@ -263,7 +272,7 @@ void GC_maybe_gc()
 #           endif
             GC_finish_collection();
         } else {
-            if (!is_full_gc) {
+            if (!GC_is_full_gc) {
                 /* Count this as the first attempt */
                 GC_n_attempts++;
             }
@@ -300,7 +309,9 @@ GC_stop_func stop_func;
     /* Make sure all blocks have been reclaimed, so sweep routines     */
     /* don't see cleared mark bits.                                    */
     /* If we're guaranteed to finish, then this is unnecessary.        */
-    if (stop_func != GC_never_stop_func
+    /* In the find_leak case, we have to finish to guarantee that      */
+    /* previously unmarked objects are not reported as leaks.          */
+    if ((GC_find_leak || stop_func != GC_never_stop_func)
         && !GC_reclaim_all(stop_func, FALSE)) {
         /* Aborted.  So far everything is still consistent. */
         return(FALSE);
@@ -310,6 +321,7 @@ GC_stop_func stop_func;
 #   ifdef SAVE_CALL_CHAIN
         GC_save_callers(GC_last_stack);
 #   endif
+    GC_is_full_gc = TRUE;
     if (!GC_stopped_mark(stop_func)) {
       if (!GC_incremental) {
         /* We're partially done and have no way to complete or use     */
@@ -481,7 +493,7 @@ void GC_finish_collection()
 #   ifdef GATHERSTATS
         GC_mem_found = 0;
 #   endif
-#   ifdef FIND_LEAK
+    if (GC_find_leak) {
       /* Mark all objects on the free list.  All objects should be     */
       /* marked when we're done.                                       */
         {
@@ -504,25 +516,26 @@ void GC_finish_collection()
             }
           }
         }
-      /* Check that everything is marked */
         GC_start_reclaim(TRUE);
-#   else
+          /* The above just checks; it doesn't really reclaim anything. */
+    }
 
-      GC_finalize();
-#     ifdef STUBBORN_ALLOC
-        GC_clean_changing_list();
-#     endif
+    GC_finalize();
+#   ifdef STUBBORN_ALLOC
+      GC_clean_changing_list();
+#   endif
 
-#     ifdef PRINTTIMES
-        GET_TIME(finalize_time);
-#     endif
+#   ifdef PRINTTIMES
+      GET_TIME(finalize_time);
+#   endif
 
-      /* Clear free list mark bits, in case they got accidentally marked   */
-      /* Note: HBLKPTR(p) == pointer to head of block containing *p        */
-      /* Also subtract memory remaining from GC_mem_found count.           */
-      /* Note that composite objects on free list are cleared.             */
-      /* Thus accidentally marking a free list is not a problem; only      */
-      /* objects on the list itself will be marked, and that's fixed here. */
+    /* Clear free list mark bits, in case they got accidentally marked   */
+    /* Note: HBLKPTR(p) == pointer to head of block containing *p        */
+    /* (or GC_find_leak is set and they were intentionally marked.)      */
+    /* Also subtract memory remaining from GC_mem_found count.           */
+    /* Note that composite objects on free list are cleared.             */
+    /* Thus accidentally marking a free list is not a problem; only      */
+    /* objects on the list itself will be marked, and that's fixed here. */
       {
         register word size;             /* current object size          */
         register ptr_t p;               /* pointer to current object    */
@@ -548,15 +561,20 @@ void GC_finish_collection()
       }
 
 
-#     ifdef PRINTSTATS
+#   ifdef PRINTSTATS
         GC_printf1("Bytes recovered before sweep - f.l. count = %ld\n",
                   (long)WORDS_TO_BYTES(GC_mem_found));
-#     endif
-
+#   endif
     /* Reconstruct free lists to contain everything not marked */
-      GC_start_reclaim(FALSE);
-
-#   endif /* !FIND_LEAK */
+      GC_start_reclaim(FALSE);
+      if (GC_is_full_gc)  {
+        GC_used_heap_size_after_full = USED_HEAP_SIZE;
+        GC_need_full_gc = FALSE;
+      } else {
+        GC_need_full_gc =
+          BYTES_TO_WORDS(USED_HEAP_SIZE - GC_used_heap_size_after_full)
+           > min_words_allocd();
+      }
 
 #   ifdef PRINTSTATS
         GC_printf2(
@@ -573,6 +591,7 @@
 #   endif
 
       GC_n_attempts = 0;
+      GC_is_full_gc = FALSE;
     /* Reset or increment counters for next cycle */
       GC_words_allocd_before_gc += GC_words_allocd;
       GC_non_gc_bytes_at_gc = GC_non_gc_bytes;
@@ -640,7 +659,8 @@ word bytes;
     if (GC_n_heap_sects >= MAX_HEAP_SECTS) {
         ABORT("Too many heap sections: Increase MAXHINCR or MAX_HEAP_SECTS");
     }
-    if (!GC_install_header(p)) {
+    phdr = GC_install_header(p);
+    if (0 == phdr) {
         /* This is extremely unlikely. Can't add it. This will          */
         /* almost certainly result in a 0 return from the allocator,    */
         /* which is entirely appropriate.                               */
@@ -650,7 +670,6 @@ word bytes;
     GC_heap_sects[GC_n_heap_sects].hs_bytes = bytes;
     GC_n_heap_sects++;
     words = BYTES_TO_WORDS(bytes - HDR_BYTES);
-    phdr = HDR(p);
     phdr -> hb_sz = words;
     phdr -> hb_map = (char *)1;   /* A value != GC_invalid_map  */
     phdr -> hb_flags = 0;
@@ -669,27 +688,6 @@ word bytes;
     }
 }
 
-#ifdef PRESERVE_LAST
-
-GC_bool GC_protect_last_block = FALSE;
-
-GC_bool GC_in_last_heap_sect(p)
-ptr_t p;
-{
-    struct HeapSect * last_heap_sect;
-    ptr_t start;
-    ptr_t end;
-
-    if (!GC_protect_last_block) return FALSE;
-    last_heap_sect = &(GC_heap_sects[GC_n_heap_sects-1]);
-    start = last_heap_sect -> hs_start;
-    if (p < start) return FALSE;
-    end = start + last_heap_sect -> hs_bytes;
-    if (p >= end) return FALSE;
-    return TRUE;
-}
-#endif
-
 # if !defined(NO_DEBUGGING)
 void GC_print_heap_sects()
 {
@@ -823,9 +821,7 @@ word n;
     LOCK();
     if (!GC_is_initialized) GC_init_inner();
     result = (int)GC_expand_hp_inner(divHBLKSZ((word)bytes));
-#   ifdef PRESERVE_LAST
-        if (result) GC_protect_last_block = FALSE;
-#   endif
+    if (result) GC_requested_heapsize += bytes;
     UNLOCK();
     ENABLE_SIGNALS();
     return(result);
@@ -839,7 +835,8 @@ GC_bool GC_collect_or_expand(needed_blocks, ignore_off
 word needed_blocks;
 GC_bool ignore_off_page;
 {
-    if (!GC_incremental && !GC_dont_gc && GC_should_collect()) {
+    if (!GC_incremental && !GC_dont_gc &&
+        (GC_dont_expand && GC_words_allocd > 0 || GC_should_collect())) {
       GC_notify_full_gc();
       GC_gcollect_inner();
     } else {
@@ -877,12 +874,6 @@ GC_bool ignore_off_page;
             GC_printf0("Memory available again ...\n");
         }
 #       endif
-#       ifdef PRESERVE_LAST
-            if (needed_blocks > 1) GC_protect_last_block = TRUE;
-              /* We were forced to expand the heap as the result        */
-              /* of a large block allocation. Avoid breaking up         */
-              /* new block into small pieces.                           */
-#       endif
       }
     }
     return(TRUE);
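
Note on the central behavioral change in revision 1.3 (illustration, not part of the patch): full collections are no longer driven only by a fixed partial-collection count; GC_finish_collection now records the heap size after each full collection and sets GC_need_full_gc once the heap has grown by more than the usual allocation threshold, and GC_maybe_gc honors that flag. The standalone C sketch below restates that heuristic in isolation; the simplified types, the helper names record_collection/want_full_mark, and the byte-based parameters are hypothetical, while the static variables mirror the patched globals.

/* Minimal sketch of the heap-growth-driven full-GC trigger, assuming    */
/* sizes are in bytes and min_words_allocd is the collector's existing   */
/* allocation threshold.  Names other than the mirrored globals are      */
/* illustrative only.                                                     */
#include <stddef.h>

typedef size_t word;
typedef int GC_bool;
#define TRUE  1
#define FALSE 0

#define BYTES_TO_WORDS(b) ((b) / sizeof(word))

static word used_heap_size_after_full = 0;  /* cf. GC_used_heap_size_after_full */
static GC_bool need_full_gc = FALSE;        /* cf. GC_need_full_gc */

/* Called at the end of each collection, as GC_finish_collection now does. */
static void record_collection(GC_bool was_full_gc, word used_heap_size,
                              word min_words_allocd)
{
    if (was_full_gc) {
        /* Remember the heap footprint right after a full collection.   */
        used_heap_size_after_full = used_heap_size;
        need_full_gc = FALSE;
    } else {
        /* Request a full collection once the heap has grown by more    */
        /* than the usual allocation threshold since the last full GC.  */
        need_full_gc =
            BYTES_TO_WORDS(used_heap_size - used_heap_size_after_full)
                > min_words_allocd;
    }
}

/* GC_maybe_gc's updated test: do a full mark either on demand or       */
/* every (GC_full_freq + 1)-th collection.                               */
static GC_bool want_full_mark(int n_partial_gcs, int full_freq)
{
    return need_full_gc || n_partial_gcs >= full_freq;
}

Compared with the old rule of a full mark every 5th collection, this ties full collections to actual heap growth, which is why the patch can also raise GC_full_freq from 4 to 19: a heap that has stopped growing is no longer re-marked in full nearly as often.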