
📄 mark.c

📁 A garbage collector for C and C++
💻 C
📖 Page 1 of 4
      /* collection.  Thus we have to recover from faults here.  */
      /* This code does not appear to be necessary for Windows   */
      /* 95/NT/2000. Note that this code should never generate   */
      /* an incremental GC write fault.                          */
      __try {
#   else /* __GNUC__ */
      /* Manually install an exception handler since GCC does    */
      /* not yet support Structured Exception Handling (SEH) on  */
      /* Win32.                                                  */
      ext_ex_regn er;

      er.alt_path = &&handle_ex;
      er.ex_reg.handler = mark_ex_handler;
      asm volatile ("movl %%fs:0, %0" : "=r" (er.ex_reg.prev));
      asm volatile ("movl %0, %%fs:0" : : "r" (&er));
#   endif /* __GNUC__ */
          ret_val = GC_mark_some_inner(cold_gc_frame);
#   ifndef __GNUC__
      } __except (GetExceptionCode() == EXCEPTION_ACCESS_VIOLATION ?
                EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH) {
#   else /* __GNUC__ */
          /* Prevent GCC from considering the following code unreachable */
          /* and thus eliminating it.                                    */
          if (er.alt_path != 0)
              goto rm_handler;
handle_ex:
          /* Execution resumes from here on an access violation. */
#   endif /* __GNUC__ */
#         ifdef CONDPRINT
            if (GC_print_stats) {
              GC_printf0("Caught ACCESS_VIOLATION in marker. "
                         "Memory mapping disappeared.\n");
            }
#         endif /* CONDPRINT */
          /* We have bad roots on the stack.  Discard mark stack.  */
          /* Rescan from marked objects.  Redetermine roots.       */
          GC_invalidate_mark_state();
          scan_ptr = 0;
          ret_val = FALSE;
#   ifndef __GNUC__
      }
#   else /* __GNUC__ */
rm_handler:
      /* Uninstall the exception handler */
      asm volatile ("mov %0, %%fs:0" : : "r" (er.ex_reg.prev));
#   endif /* __GNUC__ */
      return ret_val;
  }
#endif /* MSWIN32 */

GC_bool GC_mark_stack_empty()
{
    return(GC_mark_stack_top < GC_mark_stack);
}

#ifdef PROF_MARKER
    word GC_prof_array[10];
#   define PROF(n) GC_prof_array[n]++
#else
#   define PROF(n)
#endif
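
/* [Illustrative sketch; not part of the original mark.c.]  The recovery    */
/* pattern used above, reduced to its core: probe memory inside __try and   */
/* treat an access violation as "the mapping disappeared" instead of        */
/* crashing.  Requires the Microsoft compiler; apart from the Win32 SEH     */
/* names (GetExceptionCode etc.), everything here is invented for the       */
/* sketch.                                                                   */
#if defined(MSWIN32) && !defined(__GNUC__)
# include <windows.h>

  /* Read *p into *out.  Return 1 on success, 0 if the read faulted. */
  static int sketch_probe_word(const volatile unsigned long *p,
                               unsigned long *out)
  {
      __try {
          *out = *p;            /* may fault if the page was unmapped       */
          return 1;
      } __except (GetExceptionCode() == EXCEPTION_ACCESS_VIOLATION ?
                  EXCEPTION_EXECUTE_HANDLER : EXCEPTION_CONTINUE_SEARCH) {
          return 0;             /* caller discards its roots and rescans    */
      }
  }
#endif
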
/* Given a pointer to someplace other than a small object page or the  */
/* first page of a large object, either:                               */
/*      - return a pointer to somewhere in the first page of the large */
/*        object, if current points to a large object.                 */
/*        In this case *hhdr is replaced with a pointer to the header  */
/*        for the large object.                                        */
/*      - just return current if it does not point to a large object.  */
/*ARGSUSED*/
ptr_t GC_find_start(current, hhdr, new_hdr_p)
register ptr_t current;
register hdr *hhdr, **new_hdr_p;
{
    if (GC_all_interior_pointers) {
        if (hhdr != 0) {
            register ptr_t orig = current;

            current = (ptr_t)HBLKPTR(current);
            do {
              current = current - HBLKSIZE*(word)hhdr;
              hhdr = HDR(current);
            } while(IS_FORWARDING_ADDR_OR_NIL(hhdr));
            /* current points to near the start of the large object */
            if (hhdr -> hb_flags & IGNORE_OFF_PAGE) return(orig);
            if ((word *)orig - (word *)current
                 >= (ptrdiff_t)(hhdr->hb_sz)) {
                /* Pointer past the end of the block */
                return(orig);
            }
            *new_hdr_p = hhdr;
            return(current);
        } else {
            return(current);
        }
    } else {
        return(current);
    }
}

void GC_invalidate_mark_state()
{
    GC_mark_state = MS_INVALID;
    GC_mark_stack_top = GC_mark_stack-1;
}

mse * GC_signal_mark_stack_overflow(msp)
mse * msp;
{
    GC_mark_state = MS_INVALID;
    GC_mark_stack_too_small = TRUE;
#   ifdef CONDPRINT
      if (GC_print_stats) {
        GC_printf1("Mark stack overflow; current size = %lu entries\n",
                    GC_mark_stack_size);
      }
#   endif
    return(msp - GC_MARK_STACK_DISCARDS);
}
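
/* [Illustrative sketch; not part of the original mark.c.]  How a           */
/* GC_DS_BITMAP-style mark descriptor is consumed: the most significant bit */
/* of the descriptor word corresponds to the first word of the object, and  */
/* a set bit means "this word may hold a pointer".  The marker tests the    */
/* sign bit and shifts left, exactly as in the GC_DS_BITMAP case of         */
/* GC_mark_from below.  All names here are local to the sketch.             */
#include <stdio.h>

typedef unsigned long sketch_word;
#define SKETCH_WORDSZ (8 * sizeof(sketch_word))

static void sketch_scan_bitmap(sketch_word *obj, sketch_word bitmap)
{
    sketch_word *p = obj;

    while (bitmap != 0) {
        if ((long)bitmap < 0) {         /* top bit set: candidate pointer   */
            printf("word %ld may hold a pointer: %#lx\n",
                   (long)(p - obj), (unsigned long)*p);
        }
        bitmap <<= 1;                   /* advance to the next object word  */
        ++p;
    }
}

static void sketch_bitmap_demo(void)
{
    sketch_word obj[4] = { 0x1000, 42, 0x2000, 7 };
    /* Words 0 and 2 hold pointers: set the two corresponding top bits. */
    sketch_word bitmap = ((sketch_word)1 << (SKETCH_WORDSZ - 1))
                       | ((sketch_word)1 << (SKETCH_WORDSZ - 3));

    sketch_scan_bitmap(obj, bitmap);
}
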
/*
 * Mark objects pointed to by the regions described by
 * mark stack entries between GC_mark_stack and GC_mark_stack_top,
 * inclusive.  Assumes the upper limit of a mark stack entry
 * is never 0.  A mark stack entry never has size 0.
 * We try to traverse on the order of a hblk of memory before we return.
 * Caller is responsible for calling this until the mark stack is empty.
 * Note that this is the most performance critical routine in the
 * collector.  Hence it contains all sorts of ugly hacks to speed
 * things up.  In particular, we avoid procedure calls on the common
 * path, we take advantage of peculiarities of the mark descriptor
 * encoding, we optionally maintain a cache for the block address to
 * header mapping, we prefetch when an object is "grayed", etc.
 */
mse * GC_mark_from(mark_stack_top, mark_stack, mark_stack_limit)
mse * mark_stack_top;
mse * mark_stack;
mse * mark_stack_limit;
{
  int credit = HBLKSIZE;        /* Remaining credit for marking work   */
  register word * current_p;    /* Pointer to current candidate ptr.   */
  register word current;        /* Candidate pointer.                  */
  register word * limit;        /* (Incl) limit of current candidate   */
                                /* range                               */
  register word descr;
  register ptr_t greatest_ha = GC_greatest_plausible_heap_addr;
  register ptr_t least_ha = GC_least_plausible_heap_addr;
  DECLARE_HDR_CACHE;

# define SPLIT_RANGE_WORDS 128  /* Must be power of 2.          */

  GC_objects_are_marked = TRUE;
  INIT_HDR_CACHE;

# ifdef OS2 /* Use untweaked version to circumvent compiler problem */
  while (mark_stack_top >= mark_stack && credit >= 0) {
# else
  while ((((ptr_t)mark_stack_top - (ptr_t)mark_stack) | credit) >= 0) {
# endif
    current_p = mark_stack_top -> mse_start;
    descr = mark_stack_top -> mse_descr;
  retry:
    /* current_p and descr describe the current object.                */
    /* *mark_stack_top is vacant.                                      */
    /* The following is 0 only for small objects described by a simple */
    /* length descriptor.  For many applications this is the common    */
    /* case, so we try to detect it quickly.                           */
    if (descr & ((~(WORDS_TO_BYTES(SPLIT_RANGE_WORDS) - 1)) | GC_DS_TAGS)) {
      word tag = descr & GC_DS_TAGS;

      switch(tag) {
        case GC_DS_LENGTH:
          /* Large length.                                               */
          /* Process part of the range to avoid pushing too much on the */
          /* stack.                                                      */
          GC_ASSERT(descr < (word)GC_greatest_plausible_heap_addr
                            - (word)GC_least_plausible_heap_addr);
#         ifdef PARALLEL_MARK
#           define SHARE_BYTES 2048
            if (descr > SHARE_BYTES && GC_parallel
                && mark_stack_top < mark_stack_limit - 1) {
              int new_size = (descr/2) & ~(sizeof(word)-1);
              mark_stack_top -> mse_start = current_p;
              mark_stack_top -> mse_descr = new_size + sizeof(word);
                                        /* makes sure we handle         */
                                        /* misaligned pointers.         */
              mark_stack_top++;
              current_p = (word *) ((char *)current_p + new_size);
              descr -= new_size;
              goto retry;
            }
#         endif /* PARALLEL_MARK */
          mark_stack_top -> mse_start =
                limit = current_p + SPLIT_RANGE_WORDS-1;
          mark_stack_top -> mse_descr =
                        descr - WORDS_TO_BYTES(SPLIT_RANGE_WORDS-1);
          /* Make sure that pointers overlapping the two ranges are    */
          /* considered.                                               */
          limit = (word *)((char *)limit + sizeof(word) - ALIGNMENT);
          break;
        case GC_DS_BITMAP:
          mark_stack_top--;
          descr &= ~GC_DS_TAGS;
          credit -= WORDS_TO_BYTES(WORDSZ/2); /* guess */
          while (descr != 0) {
            if ((signed_word)descr < 0) {
              current = *current_p;
              FIXUP_POINTER(current);
              if ((ptr_t)current >= least_ha && (ptr_t)current < greatest_ha) {
                PREFETCH((ptr_t)current);
                HC_PUSH_CONTENTS((ptr_t)current, mark_stack_top,
                              mark_stack_limit, current_p, exit1);
              }
            }
            descr <<= 1;
            ++ current_p;
          }
          continue;
        case GC_DS_PROC:
          mark_stack_top--;
          credit -= GC_PROC_BYTES;
          mark_stack_top =
              (*PROC(descr))
                    (current_p, mark_stack_top,
                    mark_stack_limit, ENV(descr));
          continue;
        case GC_DS_PER_OBJECT:
          if ((signed_word)descr >= 0) {
            /* Descriptor is in the object.     */
            descr = *(word *)((ptr_t)current_p + descr - GC_DS_PER_OBJECT);
          } else {
            /* Descriptor is in type descriptor pointed to by first    */
            /* word in object.                                         */
            ptr_t type_descr = *(ptr_t *)current_p;
            /* type_descr is either a valid pointer to the descriptor  */
            /* structure, or this object was on a free list.  If it    */
            /* was anything but the last object on the free list, we   */
            /* will misinterpret the next object on the free list as   */
            /* the type descriptor, and get a 0 GC descriptor, which   */
            /* is ideal.  Unfortunately, we need to check for the last */
            /* object case explicitly.                                 */
            if (0 == type_descr) {
                /* Rarely executed.     */
                mark_stack_top--;
                continue;
            }
            descr = *(word *)(type_descr
                              - (descr - (GC_DS_PER_OBJECT
                                          - GC_INDIR_PER_OBJ_BIAS)));
          }
          if (0 == descr) {
              /* Can happen either because we generated a 0 descriptor */
              /* or we saw a pointer to a free object.                 */
              mark_stack_top--;
              continue;
          }
          goto retry;
      }
    } else /* Small object with length descriptor */ {
      mark_stack_top--;
      limit = (word *)(((ptr_t)current_p) + (word)descr);
    }
    /* The simple case in which we're scanning a range. */
    GC_ASSERT(!((word)current_p & (ALIGNMENT-1)));
    credit -= (ptr_t)limit - (ptr_t)current_p;
    limit -= 1;
    {
#     define PREF_DIST 4
#     ifndef SMALL_CONFIG
        word deferred;

        /* Try to prefetch the next pointer to be examined asap.       */
        /* Empirically, this also seems to help slightly without       */
        /* prefetches, at least on linux/X86.  Presumably this loop    */
        /* ends up with less register pressure, and gcc thus ends up   */
        /* generating slightly better code.  Overall gcc code quality  */
        /* for this loop is still not great.                           */
        for(;;) {
          PREFETCH((ptr_t)limit - PREF_DIST*CACHE_LINE_SIZE);
          GC_ASSERT(limit >= current_p);
          deferred = *limit;
          FIXUP_POINTER(deferred);
          limit = (word *)((char *)limit - ALIGNMENT);
          if ((ptr_t)deferred >= least_ha && (ptr_t)deferred < greatest_ha) {
            PREFETCH((ptr_t)deferred);
            break;
          }
          if (current_p > limit) goto next_object;
          /* Unroll once, so we don't do too many of the prefetches    */
          /* based on limit.                                           */
          deferred = *limit;
          FIXUP_POINTER(deferred);
          limit = (word *)((char *)limit - ALIGNMENT);
          if ((ptr_t)deferred >= least_ha && (ptr_t)deferred < greatest_ha) {
            PREFETCH((ptr_t)deferred);
            break;
          }
          if (current_p > limit) goto next_object;
        }
#     endif

      while (current_p <= limit) {
        /* Empirically, unrolling this loop doesn't help a lot. */
        /* Since HC_PUSH_CONTENTS expands to a lot of code,     */
        /* we don't.                                            */
        current = *current_p;
        FIXUP_POINTER(current);
        PREFETCH((ptr_t)current_p + PREF_DIST*CACHE_LINE_SIZE);
        if ((ptr_t)current >= least_ha && (ptr_t)current < greatest_ha) {
          /* Prefetch the contents of the object we just pushed.  It's */
          /* likely we will need them soon.                            */
          PREFETCH((ptr_t)current);
          HC_PUSH_CONTENTS((ptr_t)current, mark_stack_top,
                           mark_stack_limit, current_p, exit2);
        }
        current_p = (word *)((char *)current_p + ALIGNMENT);
      }

#     ifndef SMALL_CONFIG
        /* We still need to mark the entry we previously prefetched.   */
        /* We already know that it passes the preliminary pointer      */
        /* validity test.                                              */
        HC_PUSH_CONTENTS((ptr_t)deferred, mark_stack_top,
                         mark_stack_limit, current_p, exit4);
        next_object:;
#     endif
    }
  }
  return mark_stack_top;
}
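
/* [Illustrative sketch; not part of the original mark.c.]  The GC_DS_LENGTH */
/* case above bounds the work done per mark stack entry: only                */
/* SPLIT_RANGE_WORDS words of a large object are scanned now and the         */
/* remainder is pushed back for later, which is what lets the marker stop    */
/* after roughly one hblk worth of work.  A simplified, stand-alone version  */
/* of that splitting; all names are local to the sketch.                     */
#include <stddef.h>

#define SKETCH_CHUNK_WORDS 128          /* analogous to SPLIT_RANGE_WORDS    */

struct sketch_range {
    size_t *start;                      /* first unscanned word              */
    size_t  nwords;                     /* words remaining in the range      */
};

/* Scan at most SKETCH_CHUNK_WORDS words of *r, then record the unscanned   */
/* remainder back into *r.  Returns the number of words scanned this call.  */
static size_t sketch_scan_some(struct sketch_range *r,
                               void (*visit)(size_t candidate))
{
    size_t n = r->nwords < SKETCH_CHUNK_WORDS ? r->nwords : SKETCH_CHUNK_WORDS;
    size_t i;

    for (i = 0; i < n; ++i) visit(r->start[i]);
    r->start  += n;
    r->nwords -= n;
    return n;                           /* caller repeats until nwords == 0  */
}
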
#ifdef PARALLEL_MARK

/* We assume we have an ANSI C Compiler.        */
GC_bool GC_help_wanted = FALSE;
unsigned GC_helper_count = 0;
unsigned GC_active_count = 0;
mse * VOLATILE GC_first_nonempty;
word GC_mark_no = 0;

#define LOCAL_MARK_STACK_SIZE HBLKSIZE
        /* Under normal circumstances, this is big enough to guarantee */
        /* we don't overflow half of it in a single call to            */
        /* GC_mark_from.                                               */

/* Steal mark stack entries starting at mse low into mark stack local  */
/* until we either steal mse high, or we have max entries.             */
/* Return a pointer to the top of the local mark stack.                */
/* *next is replaced by a pointer to the next unscanned mark stack     */
/* entry.                                                              */
mse * GC_steal_mark_stack(mse * low, mse * high, mse * local,
                          unsigned max, mse **next)
{
    mse *p;
    mse *top = local - 1;
    unsigned i = 0;

    /* Make sure that prior writes to the mark stack are visible. */
    /* On some architectures, the fact that the reads are         */
    /* volatile should suffice.                                   */
#   if !defined(IA64) && !defined(HP_PA) && !defined(I386)
      GC_memory_barrier();
#   endif
    GC_ASSERT(high >= low-1 && high - low + 1 <= GC_mark_stack_size);
    for (p = low; p <= high && i <= max; ++p) {
        word descr = *(volatile word *) &(p -> mse_descr);
        /* In the IA64 memory model, the following volatile store is   */
        /* ordered after this read of descr.  Thus a thread must read  */
        /* the original nonzero value.  HP_PA appears to be similar,   */
        /* and if I'm reading the P4 spec correctly, X86 is probably   */
        /* also OK.  In some other cases we need a barrier.            */
#       if !defined(IA64) && !defined(HP_PA) && !defined(I386)
          GC_memory_barrier();
#       endif
        if (descr != 0) {
            *(volatile word *) &(p -> mse_descr) = 0;
            /* More than one thread may get this entry, but that's only */
            /* a minor performance problem.                             */
            ++top;
            top -> mse_descr = descr;
            top -> mse_start = p -> mse_start;
            GC_ASSERT((top -> mse_descr & GC_DS_TAGS) != GC_DS_LENGTH ||
                        top -> mse_descr < GC_greatest_plausible_heap_addr
                                           - GC_least_plausible_heap_addr);
            /* If this is a big object, count it as                     */
            /* size/256 + 1 objects.                                    */
            ++i;
            if ((descr & GC_DS_TAGS) == GC_DS_LENGTH) i += (descr >> 8);
        }
    }
    *next = p;
    return top;
}

/* Copy back a local mark stack.        */
/* low and high are inclusive bounds.   */
void GC_return_mark_stack(mse * low, mse * high)
{
    mse * my_top;
    mse * my_start;
    size_t stack_size;

    if (high < low) return;
    stack_size = high - low + 1;
    GC_acquire_mark_lock();
    my_top = GC_mark_stack_top;
    my_start = my_top + 1;
    if (my_start - GC_mark_stack + stack_size > GC_mark_stack_size) {
#     ifdef CONDPRINT
        if (GC_print_stats) {
          GC_printf0("No room to copy back mark stack.");
        }
#     endif
      GC_mark_state = MS_INVALID;
      GC_mark_stack_too_small = TRUE;
      /* We drop the local mark stack.  We'll fix things later. */
    } else {
      BCOPY(low, my_start, stack_size * sizeof(mse));
      GC_ASSERT(GC_mark_stack_top == my_top);
#     if !defined(IA64) && !defined(HP_PA)
        GC_memory_barrier();
#     endif
        /* On IA64, the volatile write acts as a release barrier. */
      GC_mark_stack_top = my_top + stack_size;
    }
    GC_release_mark_lock();
    GC_notify_all_marker();
}

/* Mark from the local mark stack.              */
/* On return, the local mark stack is empty.    */
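
/* [Illustrative sketch; not part of the original mark.c.]  The essence of  */
/* GC_steal_mark_stack above: a helper claims entries from the shared mark  */
/* stack by clearing the descriptor field and copying the entry into a      */
/* private stack.  The original simply overwrites the descriptor and        */
/* tolerates the rare case of two helpers grabbing the same entry; this     */
/* sketch uses a C11 atomic exchange instead, which is stricter than        */
/* mark.c needs.  All names are local to the sketch.                        */
#include <stdatomic.h>
#include <stddef.h>

struct sketch_shared_mse { size_t *start; _Atomic size_t descr; };
struct sketch_local_mse  { size_t *start; size_t descr; };

/* Claim up to max entries from [low, high] into local[]; return the count. */
static size_t sketch_steal(struct sketch_shared_mse *low,
                           struct sketch_shared_mse *high,
                           struct sketch_local_mse *local, size_t max)
{
    size_t n = 0;
    struct sketch_shared_mse *p;

    for (p = low; p <= high && n < max; ++p) {
        size_t d = atomic_exchange(&p->descr, (size_t)0);   /* claim entry  */
        if (d != 0) {
            local[n].start = p->start;
            local[n].descr = d;
            ++n;
        }
    }
    return n;                 /* entries are later copied back under a lock */
}
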
