
Diff for /OpenXM_contrib2/asir2000/gc/mark.c between version 1.4 and 1.7

version 1.4, 2001/04/20 07:39:19 (left column) / version 1.7, 2003/06/24 05:11:33 (right column)
Line 19 (v1.4) / Line 19 (v1.7)
 # include <stdio.h>  # include <stdio.h>
 # include "private/gc_pmark.h"  # include "private/gc_pmark.h"
   
   #if defined(MSWIN32) && defined(__GNUC__)
   # include <excpt.h>
   #endif
   
 /* We put this here to minimize the risk of inlining. */  /* We put this here to minimize the risk of inlining. */
 /*VARARGS*/  /*VARARGS*/
 #ifdef __WATCOMC__  #ifdef __WATCOMC__
Line 261 (v1.4) / Line 265 (v1.7)  static void alloc_mark_stack();
 /* remains valid until all marking is complete.         */  /* remains valid until all marking is complete.         */
 /* A zero value indicates that it's OK to miss some     */  /* A zero value indicates that it's OK to miss some     */
 /* register values.                                     */  /* register values.                                     */
 GC_bool GC_mark_some(cold_gc_frame)  /* We hold the allocation lock.  In the case of         */
 ptr_t cold_gc_frame;  /* incremental collection, the world may not be stopped.*/
 {  
 #ifdef MSWIN32  #ifdef MSWIN32
   /* Windows 98 appears to asynchronously create and remove writable    */    /* For win32, this is called after we establish a structured  */
   /* memory mappings, for reasons we haven't yet understood.  Since     */    /* exception handler, in case Windows unmaps one of our root  */
   /* we look for writable regions to determine the root set, we may     */    /* segments.  See below.  In either case, we acquire the      */
   /* try to mark from an address range that disappeared since we        */    /* allocator lock long before we get here.                    */
   /* started the collection.  Thus we have to recover from faults here. */    GC_bool GC_mark_some_inner(cold_gc_frame)
   /* This code does not appear to be necessary for Windows 95/NT/2000.  */    ptr_t cold_gc_frame;
   /* Note that this code should never generate an incremental GC write  */  #else
   /* fault.                                                             */    GC_bool GC_mark_some(cold_gc_frame)
   __try {    ptr_t cold_gc_frame;
 #endif  #endif
   {
     switch(GC_mark_state) {      switch(GC_mark_state) {
         case MS_NONE:          case MS_NONE:
             return(FALSE);              return(FALSE);
Line 395 (v1.4) / Line 399 (v1.7)  ptr_t cold_gc_frame;
             ABORT("GC_mark_some: bad state");              ABORT("GC_mark_some: bad state");
             return(FALSE);              return(FALSE);
     }      }
   }
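The switch above is the heart of the incremental marker: each call to GC_mark_some() performs one bounded unit of mark work and reports completion through its return value. As an orientation sketch only (the driver function name is illustrative, not collector code; the state names are the ones declared in this era's gc_pmark.h):

    /* Illustrative driver loop, not part of mark.c: call the        */
    /* incremental marker until it reports that marking finished.    */
    /* Each call advances GC_mark_state -- MS_NONE, MS_PUSH_RESCUERS,*/
    /* MS_PUSH_UNCOLLECTABLE, MS_ROOTS_PUSHED, MS_PARTIALLY_INVALID, */
    /* MS_INVALID -- by one bounded step.                            */
    static void mark_to_completion(ptr_t cold_gc_frame)
    {
        while (!GC_mark_some(cold_gc_frame)) {
            /* More marking remains; during incremental collection   */
            /* mutator work may be interleaved here.                 */
        }
    }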
   
   
 #ifdef MSWIN32  #ifdef MSWIN32
   } __except (GetExceptionCode() == EXCEPTION_ACCESS_VIOLATION ?  
             EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH) {  # ifdef __GNUC__
 #   ifdef CONDPRINT  
       if (GC_print_stats) {      typedef struct {
         GC_printf0("Caught ACCESS_VIOLATION in marker. "        EXCEPTION_REGISTRATION ex_reg;
                    "Memory mapping disappeared.\n");        void *alt_path;
       } ext_ex_regn;
   
   
       static EXCEPTION_DISPOSITION mark_ex_handler(
           struct _EXCEPTION_RECORD *ex_rec,
           void *est_frame,
           struct _CONTEXT *context,
           void *disp_ctxt)
       {
           if (ex_rec->ExceptionCode == STATUS_ACCESS_VIOLATION) {
             ext_ex_regn *xer = (ext_ex_regn *)est_frame;
   
             /* Unwind from the inner function assuming the standard */
             /* function prologue.                                   */
             /* Assumes code has not been compiled with              */
             /* -fomit-frame-pointer.                                */
             context->Esp = context->Ebp;
             context->Ebp = *((DWORD *)context->Esp);
             context->Esp = context->Esp - 8;
   
             /* Resume execution at the "real" handler within the    */
             /* wrapper function.                                    */
             context->Eip = (DWORD )(xer->alt_path);
   
             return ExceptionContinueExecution;
   
           } else {
               return ExceptionContinueSearch;
           }
       }
   # endif /* __GNUC__ */
   
   
     GC_bool GC_mark_some(cold_gc_frame)
     ptr_t cold_gc_frame;
     {
         GC_bool ret_val;
   
   #   ifndef __GNUC__
         /* Windows 98 appears to asynchronously create and remove  */
         /* writable memory mappings, for reasons we haven't yet    */
         /* understood.  Since we look for writable regions to      */
         /* determine the root set, we may try to mark from an      */
         /* address range that disappeared since we started the     */
         /* collection.  Thus we have to recover from faults here.  */
         /* This code does not appear to be necessary for Windows   */
         /* 95/NT/2000. Note that this code should never generate   */
         /* an incremental GC write fault.                          */
   
         __try {
   
   #   else /* __GNUC__ */
   
         /* Manually install an exception handler since GCC does    */
         /* not yet support Structured Exception Handling (SEH) on  */
         /* Win32.                                                  */
   
         ext_ex_regn er;
   
         er.alt_path = &&handle_ex;
         er.ex_reg.handler = mark_ex_handler;
         asm volatile ("movl %%fs:0, %0" : "=r" (er.ex_reg.prev));
         asm volatile ("movl %0, %%fs:0" : : "r" (&er));
   
   #   endif /* __GNUC__ */
   
             ret_val = GC_mark_some_inner(cold_gc_frame);
   
   #   ifndef __GNUC__
   
         } __except (GetExceptionCode() == EXCEPTION_ACCESS_VIOLATION ?
                   EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH) {
   
   #   else /* __GNUC__ */
   
             /* Prevent GCC from considering the following code unreachable */
             /* and thus eliminating it.                                    */
             if (er.alt_path != 0)
                 goto rm_handler;
   
   handle_ex:
             /* Execution resumes from here on an access violation. */
   
   #   endif /* __GNUC__ */
   
   #         ifdef CONDPRINT
               if (GC_print_stats) {
                 GC_printf0("Caught ACCESS_VIOLATION in marker. "
                            "Memory mapping disappeared.\n");
               }
   #         endif /* CONDPRINT */
   
             /* We have bad roots on the stack.  Discard mark stack.  */
             /* Rescan from marked objects.  Redetermine roots.     */
             GC_invalidate_mark_state();
             scan_ptr = 0;
   
             ret_val = FALSE;
   
   #   ifndef __GNUC__
   
       }        }
 #   endif /* CONDPRINT */  
     /* We have bad roots on the stack.  Discard mark stack.     */  #   else /* __GNUC__ */
     /* Rescan from marked objects.  Redetermine roots.          */  
     GC_invalidate_mark_state();  rm_handler:
     scan_ptr = 0;        /* Uninstall the exception handler */
     return FALSE;        asm volatile ("mov %0, %%fs:0" : : "r" (er.ex_reg.prev));
   
   #   endif /* __GNUC__ */
   
         return ret_val;
   }    }
 #endif /* MSWIN32 */  #endif /* MSWIN32 */
 }  
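The __GNUC__ branch above exists because GCC on Win32 lacks the __try/__except extension, so v1.7 threads a handler by hand onto the per-thread SEH chain anchored at %fs:0. The following standalone sketch isolates that registration pattern under the same assumptions as the code above (x86, frame pointers available); the wrapper and type names here are illustrative, not collector APIs:

    /* Hand-rolled SEH registration on Win32/x86, mirroring the      */
    /* __GNUC__ path of GC_mark_some() above.  Illustrative only.    */
    typedef struct seh_reg {
        struct seh_reg *prev;   /* next record in the fs:0 chain     */
        void *handler;          /* EXCEPTION_DISPOSITION handler     */
    } seh_reg;

    static void run_guarded(void (*body)(void), void *handler)
    {
        seh_reg er;
        er.handler = handler;
        /* Install: link our record at the head of the chain.        */
        asm volatile ("movl %%fs:0, %0" : "=r" (er.prev));
        asm volatile ("movl %0, %%fs:0" : : "r" (&er));
        body();
        /* Uninstall: restore the previous head of the chain.        */
        asm volatile ("movl %0, %%fs:0" : : "r" (er.prev));
    }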
   
   
 GC_bool GC_mark_stack_empty()  GC_bool GC_mark_stack_empty()
Line 427 (v1.4) / Line 538 (v1.7)  GC_bool GC_mark_stack_empty()
 #endif  #endif
   
 /* Given a pointer to someplace other than a small object page or the   */  /* Given a pointer to someplace other than a small object page or the   */
 /* first page of a large object, return a pointer either to the         */  /* first page of a large object, either:                                */
 /* start of the large object or NIL.                                    */  /*      - return a pointer to somewhere in the first page of the large  */
 /* In the latter case black list the address current.                   */  /*        object, if current points to a large object.                  */
 /* Returns NIL without black listing if current points to a block       */  /*        In this case *hhdr is replaced with a pointer to the header   */
 /* with IGNORE_OFF_PAGE set.                                            */  /*        for the large object.                                         */
   /*      - just return current if it does not point to a large object.   */
 /*ARGSUSED*/  /*ARGSUSED*/
 # ifdef PRINT_BLACK_LIST  ptr_t GC_find_start(current, hhdr, new_hdr_p)
   ptr_t GC_find_start(current, hhdr, source)  
   word source;  
 # else  
   ptr_t GC_find_start(current, hhdr)  
 # define source 0  
 # endif  
 register ptr_t current;  register ptr_t current;
 register hdr * hhdr;  register hdr *hhdr, **new_hdr_p;
 {  {
     if (GC_all_interior_pointers) {      if (GC_all_interior_pointers) {
         if (hhdr != 0) {          if (hhdr != 0) {
Line 457 (v1.4) / Line 563 (v1.7)  register hdr * hhdr;
             if ((word *)orig - (word *)current              if ((word *)orig - (word *)current
                  >= (ptrdiff_t)(hhdr->hb_sz)) {                   >= (ptrdiff_t)(hhdr->hb_sz)) {
                 /* Pointer past the end of the block */                  /* Pointer past the end of the block */
                 GC_ADD_TO_BLACK_LIST_NORMAL((word)orig, source);                  return(orig);
                 return(0);  
             }              }
               *new_hdr_p = hhdr;
             return(current);              return(current);
         } else {          } else {
             GC_ADD_TO_BLACK_LIST_NORMAL((word)current, source);              return(current);
             return(0);  
         }          }
     } else {      } else {
         GC_ADD_TO_BLACK_LIST_NORMAL((word)current, source);          return(current);
         return(0);  
     }      }
 #   undef source  
 }  }
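Note the interface change above: the v1.4 version blacklisted bogus interior pointers itself and returned 0, while the v1.7 version never blacklists. It either returns a pointer into the large object's first page, replacing *new_hdr_p with that object's header, or returns current unchanged. A hedged sketch of a caller under the new contract (only GC_find_start, hdr, and ptr_t are names from this file; the surrounding function is illustrative):

    /* Illustrative use of the v1.7 GC_find_start() contract,        */
    /* given a candidate pointer and a header guess for it.          */
    static ptr_t normalize_candidate(ptr_t current, hdr *hhdr)
    {
        hdr *new_hdr = hhdr;
        ptr_t base = GC_find_start(current, hhdr, &new_hdr);
        /* If current pointed into a large object, base lies in its  */
        /* first page and new_hdr describes that object; otherwise   */
        /* base == current and new_hdr is untouched.                 */
        return base;
    }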
   
 void GC_invalidate_mark_state()  void GC_invalidate_mark_state()
Line 547 (v1.4) / Line 650 (v1.7)  mse * mark_stack_limit;
           /* Large length.                                              */            /* Large length.                                              */
           /* Process part of the range to avoid pushing too much on the */            /* Process part of the range to avoid pushing too much on the */
           /* stack.                                                     */            /* stack.                                                     */
             GC_ASSERT(descr < (word)GC_greatest_plausible_heap_addr
                               - (word)GC_least_plausible_heap_addr);
 #         ifdef PARALLEL_MARK  #         ifdef PARALLEL_MARK
 #           define SHARE_BYTES 2048  #           define SHARE_BYTES 2048
             if (descr > SHARE_BYTES && GC_parallel              if (descr > SHARE_BYTES && GC_parallel
                 && mark_stack_top < mark_stack_limit - 1) {                  && mark_stack_top < mark_stack_limit - 1) {
               int new_size = (descr/2) & ~(sizeof(word)-1);                int new_size = (descr/2) & ~(sizeof(word)-1);
               GC_ASSERT(descr < GC_greatest_plausible_heap_addr  
                                 - GC_least_plausible_heap_addr);  
               mark_stack_top -> mse_start = current_p;                mark_stack_top -> mse_start = current_p;
               mark_stack_top -> mse_descr = new_size + sizeof(word);                mark_stack_top -> mse_descr = new_size + sizeof(word);
                                         /* makes sure we handle         */                                          /* makes sure we handle         */
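To make the split arithmetic concrete, here is a worked example under stated assumptions (4-byte words, descr = 10000 bytes; the numbers are illustrative, not from the source):

    /* new_size = (10000 / 2) & ~(sizeof(word) - 1)                  */
    /*          = 5000 & ~3 = 5000                                   */
    /* Deferred entry, left on the stack for another marker thread:  */
    /*     mse_start = current_p, mse_descr = 5000 + 4 = 5004        */
    /*     -- the extra word of overlap is what the truncated        */
    /*        "makes sure we handle" comment refers to: the word at  */
    /*        the cut is scanned even though the range is split.     */
    /* This thread then continues at once with the second half:      */
    /*     current_p advances by 5000 bytes, descr becomes 5000.     */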
Line 579 (v1.4) / Line 682 (v1.7)  mse * mark_stack_limit;
           while (descr != 0) {            while (descr != 0) {
             if ((signed_word)descr < 0) {              if ((signed_word)descr < 0) {
               current = *current_p;                current = *current_p;
                 FIXUP_POINTER(current);
               if ((ptr_t)current >= least_ha && (ptr_t)current < greatest_ha) {                if ((ptr_t)current >= least_ha && (ptr_t)current < greatest_ha) {
                 PREFETCH(current);                  PREFETCH(current);
                 HC_PUSH_CONTENTS((ptr_t)current, mark_stack_top,                  HC_PUSH_CONTENTS((ptr_t)current, mark_stack_top,
Line 653 (v1.4) / Line 757 (v1.7)  mse * mark_stack_limit;
           PREFETCH((ptr_t)limit - PREF_DIST*CACHE_LINE_SIZE);            PREFETCH((ptr_t)limit - PREF_DIST*CACHE_LINE_SIZE);
           GC_ASSERT(limit >= current_p);            GC_ASSERT(limit >= current_p);
           deferred = *limit;            deferred = *limit;
             FIXUP_POINTER(deferred);
           limit = (word *)((char *)limit - ALIGNMENT);            limit = (word *)((char *)limit - ALIGNMENT);
           if ((ptr_t)deferred >= least_ha && (ptr_t)deferred <  greatest_ha) {            if ((ptr_t)deferred >= least_ha && (ptr_t)deferred <  greatest_ha) {
             PREFETCH(deferred);              PREFETCH(deferred);
Line 662 (v1.4) / Line 767 (v1.7)  mse * mark_stack_limit;
           /* Unroll once, so we don't do too many of the prefetches     */            /* Unroll once, so we don't do too many of the prefetches     */
           /* based on limit.                                            */            /* based on limit.                                            */
           deferred = *limit;            deferred = *limit;
             FIXUP_POINTER(deferred);
           limit = (word *)((char *)limit - ALIGNMENT);            limit = (word *)((char *)limit - ALIGNMENT);
           if ((ptr_t)deferred >= least_ha && (ptr_t)deferred <  greatest_ha) {            if ((ptr_t)deferred >= least_ha && (ptr_t)deferred <  greatest_ha) {
             PREFETCH(deferred);              PREFETCH(deferred);
Line 676 (v1.4) / Line 782 (v1.7)  mse * mark_stack_limit;
         /* Since HC_PUSH_CONTENTS expands to a lot of code,     */          /* Since HC_PUSH_CONTENTS expands to a lot of code,     */
         /* we don't.                                            */          /* we don't.                                            */
         current = *current_p;          current = *current_p;
           FIXUP_POINTER(current);
         PREFETCH((ptr_t)current_p + PREF_DIST*CACHE_LINE_SIZE);          PREFETCH((ptr_t)current_p + PREF_DIST*CACHE_LINE_SIZE);
         if ((ptr_t)current >= least_ha && (ptr_t)current <  greatest_ha) {          if ((ptr_t)current >= least_ha && (ptr_t)current <  greatest_ha) {
           /* Prefetch the contents of the object we just pushed.  It's  */            /* Prefetch the contents of the object we just pushed.  It's  */
Line 727 (v1.4) / Line 834 (v1.7)  mse * GC_steal_mark_stack(mse * low, mse * high, mse *
     mse *top = local - 1;      mse *top = local - 1;
     unsigned i = 0;      unsigned i = 0;
   
       /* Make sure that prior writes to the mark stack are visible. */
       /* On some architectures, the fact that the reads are         */
       /* volatile should suffice.                                   */
   #   if !defined(IA64) && !defined(HP_PA) && !defined(I386)
         GC_memory_barrier();
   #   endif
     GC_ASSERT(high >= low-1 && high - low + 1 <= GC_mark_stack_size);      GC_ASSERT(high >= low-1 && high - low + 1 <= GC_mark_stack_size);
     for (p = low; p <= high && i <= max; ++p) {      for (p = low; p <= high && i <= max; ++p) {
         word descr = *(volatile word *) &(p -> mse_descr);          word descr = *(volatile word *) &(p -> mse_descr);
           /* In the IA64 memory model, the following volatile store is    */
           /* ordered after this read of descr.  Thus a thread must read   */
           /* the original nonzero value.  HP_PA appears to be similar,    */
           /* and if I'm reading the P4 spec correctly, X86 is probably    */
           /* also OK.  In some other cases we need a barrier.             */
   #       if !defined(IA64) && !defined(HP_PA) && !defined(I386)
             GC_memory_barrier();
   #       endif
         if (descr != 0) {          if (descr != 0) {
             *(volatile word *) &(p -> mse_descr) = 0;              *(volatile word *) &(p -> mse_descr) = 0;
               /* More than one thread may get this entry, but that's only */
               /* a minor performance problem.                             */
             ++top;              ++top;
             top -> mse_descr = descr;              top -> mse_descr = descr;
             top -> mse_start = p -> mse_start;              top -> mse_start = p -> mse_start;
             GC_ASSERT(  top -> mse_descr & GC_DS_TAGS != GC_DS_LENGTH ||              GC_ASSERT(  top -> mse_descr & GC_DS_TAGS != GC_DS_LENGTH ||
                         top -> mse_descr < GC_greatest_plausible_heap_addr                          top -> mse_descr < GC_greatest_plausible_heap_addr
                                            - GC_least_plausible_heap_addr);                                             - GC_least_plausible_heap_addr);
             /* There is no synchronization here.  We assume that at     */  
             /* least one thread will see the original descriptor.       */  
             /* Otherwise we need a barrier.                             */  
             /* More than one thread may get this entry, but that's only */  
             /* a minor performance problem.                             */  
             /* If this is a big object, count it as                     */              /* If this is a big object, count it as                     */
             /* size/256 + 1 objects.                                    */              /* size/256 + 1 objects.                                    */
             ++i;              ++i;
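The barriers added above form a publish/claim protocol with GC_return_mark_stack below: the returner must make its copied entries globally visible before advancing GC_mark_stack_top, and a stealer must observe a descriptor before zeroing it to claim the entry. Restated with C11 atomics purely for illustration (the collector long predates <stdatomic.h>, and these names are stand-ins, not collector code):

    #include <stdatomic.h>

    /* Illustrative restatement of the steal protocol above.         */
    static _Atomic(long) stack_top;  /* stands in for GC_mark_stack_top */

    void publish_entries(long new_top)   /* cf. GC_return_mark_stack */
    {
        /* ... copy mse entries into the global stack here ...       */
        atomic_store_explicit(&stack_top, new_top, memory_order_release);
    }

    long observe_entries(void)           /* cf. GC_steal_mark_stack  */
    {
        /* Acquire pairs with the release above, so entries copied   */
        /* before the publish are visible after this load.           */
        return atomic_load_explicit(&stack_top, memory_order_acquire);
    }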
Line 779 (v1.4) / Line 897 (v1.7)  void GC_return_mark_stack(mse * low, mse * high)
       BCOPY(low, my_start, stack_size * sizeof(mse));        BCOPY(low, my_start, stack_size * sizeof(mse));
       GC_ASSERT(GC_mark_stack_top = my_top);        GC_ASSERT(GC_mark_stack_top = my_top);
 #     if !defined(IA64) && !defined(HP_PA)  #     if !defined(IA64) && !defined(HP_PA)
         GC_memory_write_barrier();          GC_memory_barrier();
 #     endif  #     endif
         /* On IA64, the volatile write acts as a release barrier. */          /* On IA64, the volatile write acts as a release barrier. */
       GC_mark_stack_top = my_top + stack_size;        GC_mark_stack_top = my_top + stack_size;
Line 839 (v1.4) / Line 957 (v1.7)  long GC_markers = 2;  /* Normally changed by thread-li
                                 /* -specific code.                      */                                  /* -specific code.                      */
   
 /* Mark using the local mark stack until the global mark stack is empty */  /* Mark using the local mark stack until the global mark stack is empty */
 /* and ther are no active workers.  Update GC_first_nonempty to reflect */  /* and there are no active workers. Update GC_first_nonempty to reflect */
 /* progress.                                                            */  /* progress.                                                            */
 /* Caller does not hold mark lock.                                      */  /* Caller does not hold mark lock.                                      */
 /* Caller has already incremented GC_helper_count.  We decrement it,    */  /* Caller has already incremented GC_helper_count.  We decrement it,    */
Line 919 (v1.4) / Line 1037 (v1.7)  void GC_mark_local(mse *local_mark_stack, int id)
                     return;                      return;
                 }                  }
                 /* else there's something on the stack again, or        */                  /* else there's something on the stack again, or        */
                 /* another help may push something.                     */                  /* another helper may push something.                   */
                 GC_active_count++;                  GC_active_count++;
                 GC_ASSERT(GC_active_count > 0);                  GC_ASSERT(GC_active_count > 0);
                 GC_release_mark_lock();                  GC_release_mark_lock();
Line 951 (v1.4) / Line 1069 (v1.7)  void GC_do_parallel_mark()
   
     GC_acquire_mark_lock();      GC_acquire_mark_lock();
     GC_ASSERT(I_HOLD_LOCK());      GC_ASSERT(I_HOLD_LOCK());
     GC_ASSERT(!GC_help_wanted);      /* This could be a GC_ASSERT, but it seems safer to keep it on      */
     GC_ASSERT(GC_active_count == 0);      /* all the time, especially since it's cheap.                       */
       if (GC_help_wanted || GC_active_count != 0 || GC_helper_count != 0)
           ABORT("Tried to start parallel mark in bad state");
 #   ifdef PRINTSTATS  #   ifdef PRINTSTATS
         GC_printf1("Starting marking for mark phase number %lu\n",          GC_printf1("Starting marking for mark phase number %lu\n",
                    (unsigned long)GC_mark_no);                     (unsigned long)GC_mark_no);
Line 1341 (v1.4) / Line 1461 (v1.7)  ptr_t top;
 #   define GC_least_plausible_heap_addr least_ha  #   define GC_least_plausible_heap_addr least_ha
   
     if (top == 0) return;      if (top == 0) return;
     /* check all pointers in range and put in push if they appear */      /* check all pointers in range and push if they appear      */
     /* to be valid.                                               */      /* to be valid.                                             */
       lim = t - 1 /* longword */;        lim = t - 1 /* longword */;
       for (p = b; p <= lim; p = (word *)(((char *)p) + ALIGNMENT)) {        for (p = b; p <= lim; p = (word *)(((char *)p) + ALIGNMENT)) {
         q = *p;          q = *p;
Line 1365 (v1.4) / Line 1485 (v1.7)  ptr_t bottom;
 ptr_t top;  ptr_t top;
 ptr_t cold_gc_frame;  ptr_t cold_gc_frame;
 {  {
   if (GC_all_interior_pointers) {    if (!NEED_FIXUP_POINTER && GC_all_interior_pointers) {
 #   define EAGER_BYTES 1024  #   define EAGER_BYTES 1024
     /* Push the hot end of the stack eagerly, so that register values   */      /* Push the hot end of the stack eagerly, so that register values   */
     /* saved inside GC frames are marked before they disappear.         */      /* saved inside GC frames are marked before they disappear.         */
Line 1374 (v1.4) / Line 1494 (v1.7)  ptr_t cold_gc_frame;
         GC_push_all_stack(bottom, top);          GC_push_all_stack(bottom, top);
         return;          return;
     }      }
       GC_ASSERT(bottom <= cold_gc_frame && cold_gc_frame <= top);
 #   ifdef STACK_GROWS_DOWN  #   ifdef STACK_GROWS_DOWN
         GC_push_all_eager(bottom, cold_gc_frame);  
         GC_push_all(cold_gc_frame - sizeof(ptr_t), top);          GC_push_all(cold_gc_frame - sizeof(ptr_t), top);
           GC_push_all_eager(bottom, cold_gc_frame);
 #   else /* STACK_GROWS_UP */  #   else /* STACK_GROWS_UP */
         GC_push_all_eager(cold_gc_frame, top);  
         GC_push_all(bottom, cold_gc_frame + sizeof(ptr_t));          GC_push_all(bottom, cold_gc_frame + sizeof(ptr_t));
           GC_push_all_eager(cold_gc_frame, top);
 #   endif /* STACK_GROWS_UP */  #   endif /* STACK_GROWS_UP */
   } else {    } else {
     GC_push_all_eager(bottom, top);      GC_push_all_eager(bottom, top);
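For context, the partially eager push splits the thread stack at cold_gc_frame: the hot end, which may still hold register values saved by GC frames, is marked eagerly and without blacklisting, while the cold remainder goes through the normal deferred path. A hedged sketch of a caller; GC_approx_sp and GC_stackbottom are real collector names, but the surrounding function is illustrative:

    /* Illustrative caller of the partially eager stack push.        */
    static void push_current_stack(ptr_t cold_gc_frame)
    {
    #   ifdef STACK_GROWS_DOWN
            GC_push_all_stack_partially_eager(GC_approx_sp(),
                                              GC_stackbottom,
                                              cold_gc_frame);
    #   else
            GC_push_all_stack_partially_eager(GC_stackbottom,
                                              GC_approx_sp(),
                                              cold_gc_frame);
    #   endif
    }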
Line 1394 (v1.4) / Line 1515 (v1.7)  void GC_push_all_stack(bottom, top)
 ptr_t bottom;  ptr_t bottom;
 ptr_t top;  ptr_t top;
 {  {
   if (GC_all_interior_pointers) {    if (!NEED_FIXUP_POINTER && GC_all_interior_pointers) {
     GC_push_all(bottom, top);      GC_push_all(bottom, top);
   } else {    } else {
     GC_push_all_eager(bottom, top);      GC_push_all_eager(bottom, top);
