gc_pmark.h
/* here.  Note in particular that the "displ" value is the displacement */
/* from the beginning of the heap block, which may itself be in the	*/
/* interior of a large object.						*/
#ifdef MARK_BIT_PER_GRANULE
# define PUSH_CONTENTS_HDR(current, mark_stack_top, mark_stack_limit, \
		           source, exit_label, hhdr, do_offset_check) \
{ \
    size_t displ = HBLKDISPL(current); /* Displacement in block; in bytes. */\
    /* displ is always within range.  If current doesn't point to	*/ \
    /* first block, then we are in the all_interior_pointers case, and	*/ \
    /* it is safe to use any displacement value.			*/ \
    size_t gran_displ = BYTES_TO_GRANULES(displ); \
    size_t gran_offset = hhdr -> hb_map[gran_displ];	\
    size_t byte_offset = displ & (GRANULE_BYTES - 1); \
    ptr_t base = current;  \
    /* The following always fails for large block references. */ \
    if (EXPECT((gran_offset | byte_offset) != 0, FALSE))  { \
	if (hhdr -> hb_large_block) { \
	  /* gran_offset is bogus.	*/ \
	  size_t obj_displ; \
	  base = (ptr_t)(hhdr -> hb_block); \
	  obj_displ = (ptr_t)(current) - base;  \
	  if (obj_displ != displ) { \
	    GC_ASSERT(obj_displ < hhdr -> hb_sz); \
	    /* Must be in the all_interior_pointers case, not the first */ \
	    /* block; we already did the validity check on cache miss.  */ \
	    ; \
	  } else { \
	    if (do_offset_check && !GC_valid_offsets[obj_displ]) { \
	      GC_ADD_TO_BLACK_LIST_NORMAL(current, source); \
	      goto exit_label; \
	    } \
	  } \
	  gran_displ = 0; \
	  GC_ASSERT(hhdr -> hb_sz > HBLKSIZE || \
		    hhdr -> hb_block == HBLKPTR(current)); \
	  GC_ASSERT((ptr_t)(hhdr -> hb_block) <= (ptr_t) current); \
	} else { \
	  size_t obj_displ = GRANULES_TO_BYTES(gran_offset) \
		      	     + byte_offset; \
	  if (do_offset_check && !GC_valid_offsets[obj_displ]) { \
	    GC_ADD_TO_BLACK_LIST_NORMAL(current, source); \
	    goto exit_label; \
	  } \
	  gran_displ -= gran_offset; \
	  base -= obj_displ; \
	} \
    } \
    GC_ASSERT(hhdr == GC_find_header(base)); \
    GC_ASSERT(gran_displ % BYTES_TO_GRANULES(hhdr -> hb_sz) == 0); \
    TRACE(source, GC_log_printf("GC:%d: passed validity tests\n",GC_gc_no)); \
    SET_MARK_BIT_EXIT_IF_SET(hhdr, gran_displ, exit_label); \
    TRACE(source, GC_log_printf("GC:%d: previously unmarked\n",GC_gc_no)); \
    TRACE_TARGET(base, \
	GC_log_printf("GC:%d: marking %p from %p instead\n", GC_gc_no, \
		      base, source)); \
    INCR_MARKS(hhdr); \
    GC_STORE_BACK_PTR((ptr_t)source, base); \
    PUSH_OBJ(base, hhdr, mark_stack_top, mark_stack_limit); \
}
#endif /* MARK_BIT_PER_GRANULE */
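/*
 * The macro above recovers an object base from an interior pointer using
 * table lookups only: gran_displ indexes hhdr -> hb_map, whose entry gives
 * the pointer's offset (in granules) from the start of its object, and
 * byte_offset supplies the remaining sub-granule bytes.  The following is
 * an illustrative sketch only, not part of this header; it assumes
 * GRANULE_BYTES == 16 and a hypothetical 48-byte object size.
 */
#if 0
#include <assert.h>
#include <stddef.h>

#define EX_GRANULE_BYTES 16

/* hb_map analogue: granule index within the block -> granule offset	*/
/* of that granule within its enclosing object.				*/
static size_t ex_hb_map[16];

static void ex_init_map(size_t granules_per_obj)
{
    size_t i;
    for (i = 0; i < 16; ++i) ex_hb_map[i] = i % granules_per_obj;
}

int main(void)
{
    size_t displ = 88;        /* byte displacement of "current" in its block */
    size_t gran_displ, gran_offset, byte_offset, obj_displ;

    ex_init_map(48 / EX_GRANULE_BYTES);        /* 48-byte objects = 3 granules */
    gran_displ = displ / EX_GRANULE_BYTES;     /* BYTES_TO_GRANULES: 88/16 == 5 */
    gran_offset = ex_hb_map[gran_displ];       /* 5 % 3 == 2 granules into obj  */
    byte_offset = displ & (EX_GRANULE_BYTES - 1);            /* 88 & 15 == 8    */
    obj_displ = gran_offset * EX_GRANULE_BYTES + byte_offset;    /* == 40       */

    /* Backing current up by obj_displ lands on the object base at	*/
    /* block offset 48, i.e. granule 3, exactly as PUSH_CONTENTS_HDR	*/
    /* adjusts base and gran_displ above.				*/
    assert(displ - obj_displ == 48);
    assert(gran_displ - gran_offset == 3);
    return 0;
}
#endif /* 0 */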

#ifdef MARK_BIT_PER_OBJ
# define PUSH_CONTENTS_HDR(current, mark_stack_top, mark_stack_limit, \
		           source, exit_label, hhdr, do_offset_check) \
{ \
    size_t displ = HBLKDISPL(current); /* Displacement in block; in bytes. */\
    unsigned32 low_prod, high_prod, offset_fraction; \
    unsigned32 inv_sz = hhdr -> hb_inv_sz; \
    ptr_t base = current;  \
    LONG_MULT(high_prod, low_prod, displ, inv_sz); \
    /* product is > and within sz_in_bytes of displ * sz_in_bytes * 2**32 */ \
    if (EXPECT(low_prod >> 16 != 0, FALSE))  { \
	    /* FIXME: fails if offset is a multiple of HBLKSIZE which becomes 0 */ \
	if (inv_sz == LARGE_INV_SZ) { \
	  size_t obj_displ; \
	  base = (ptr_t)(hhdr -> hb_block); \
	  obj_displ = (ptr_t)(current) - base;  \
	  if (obj_displ != displ) { \
	    GC_ASSERT(obj_displ < hhdr -> hb_sz); \
	    /* Must be in the all_interior_pointers case, not the first */ \
	    /* block; we already did the validity check on cache miss.  */ \
	    ; \
	  } else { \
	    if (do_offset_check && !GC_valid_offsets[obj_displ]) { \
	      GC_ADD_TO_BLACK_LIST_NORMAL(current, source); \
	      goto exit_label; \
	    } \
	  } \
	  GC_ASSERT(hhdr -> hb_sz > HBLKSIZE || \
		    hhdr -> hb_block == HBLKPTR(current)); \
	  GC_ASSERT((ptr_t)(hhdr -> hb_block) < (ptr_t) current); \
	} else { \
	  /* Accurate enough if HBLKSIZE <= 2**15.	*/ \
	  GC_ASSERT(HBLKSIZE <= (1 << 15)); \
	  size_t obj_displ = (((low_prod >> 16) + 1) * (hhdr -> hb_sz)) >> 16; \
	  if (do_offset_check && !GC_valid_offsets[obj_displ]) { \
	    GC_ADD_TO_BLACK_LIST_NORMAL(current, source); \
	    goto exit_label; \
	  } \
	  base -= obj_displ; \
	} \
    } \
    /* May get here for pointer to start of block not at	*/ \
    /* beginning of object.  If so, it's valid, and we're fine. */ \
    GC_ASSERT(high_prod >= 0 && high_prod <= HBLK_OBJS(hhdr -> hb_sz)); \
    TRACE(source, GC_log_printf("GC:%d: passed validity tests\n",GC_gc_no)); \
    SET_MARK_BIT_EXIT_IF_SET(hhdr, high_prod, exit_label); \
    TRACE(source, GC_log_printf("GC:%d: previously unmarked\n",GC_gc_no)); \
    TRACE_TARGET(base, \
	GC_log_printf("GC:%d: marking %p from %p instead\n", GC_gc_no, \
		      base, source)); \
    INCR_MARKS(hhdr); \
    GC_STORE_BACK_PTR((ptr_t)source, base); \
    PUSH_OBJ(base, hhdr, mark_stack_top, mark_stack_limit); \
}
#endif /* MARK_BIT_PER_OBJ */
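/*
 * The MARK_BIT_PER_OBJ variant above avoids a division: hb_inv_sz is a
 * 32-bit fixed-point reciprocal of the object size, so the high half of
 * displ * inv_sz is the object index and a nonzero low half flags an
 * interior pointer.  The following is an illustrative sketch only, not
 * part of this header; it assumes inv_sz is roughly ceil(2**32 / sz),
 * which may differ in detail from the value the collector actually stores.
 */
#if 0
#include <assert.h>
#include <stdint.h>

int main(void)
{
    uint32_t sz = 48;           /* hypothetical object size in bytes    */
    uint32_t displ = 88;        /* byte displacement within the block   */
    uint32_t inv_sz = (uint32_t)((((uint64_t)1 << 32) + sz - 1) / sz);
    uint64_t prod = (uint64_t)displ * inv_sz;   /* what LONG_MULT splits */
    uint32_t high_prod = (uint32_t)(prod >> 32);
    uint32_t low_prod = (uint32_t)prod;

    /* High half == object index, i.e. displ / sz == 1.			*/
    assert(high_prod == displ / sz);
    /* Nonzero low_prod >> 16 means displ is not a multiple of sz,	*/
    /* i.e. an interior pointer, as in the EXPECT test above.		*/
    assert((low_prod >> 16) != 0);
    /* Recover the byte offset within the object (88 % 48 == 40)	*/
    /* without dividing, mirroring the obj_displ computation above.	*/
    assert(((((low_prod >> 16) + 1) * sz) >> 16) == displ % sz);
    return 0;
}
#endif /* 0 */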

#if defined(PRINT_BLACK_LIST) || defined(KEEP_BACK_PTRS)
#   define PUSH_ONE_CHECKED_STACK(p, source) \
	GC_mark_and_push_stack(p, (ptr_t)(source))
#else
#   define PUSH_ONE_CHECKED_STACK(p, source) \
	GC_mark_and_push_stack(p)
#endif

/*
 * Push a single value onto mark stack. Mark from the object pointed to by p.
 * Invoke FIXUP_POINTER(p) before any further processing.
 * P is considered valid even if it is an interior pointer.
 * Previously marked objects are not pushed.  Hence we make progress even
 * if the mark stack overflows.
 */

# if NEED_FIXUP_POINTER
    /* Try both the raw version and the fixed up one.	*/
#   define GC_PUSH_ONE_STACK(p, source) \
      if ((p) >= (ptr_t)GC_least_plausible_heap_addr 	\
	 && (p) < (ptr_t)GC_greatest_plausible_heap_addr) {	\
	 PUSH_ONE_CHECKED_STACK(p, source);	\
      } \
      FIXUP_POINTER(p); \
      if ((p) >= (ptr_t)GC_least_plausible_heap_addr 	\
	 && (p) < (ptr_t)GC_greatest_plausible_heap_addr) {	\
	 PUSH_ONE_CHECKED_STACK(p, source);	\
      }
# else /* !NEED_FIXUP_POINTER */
#   define GC_PUSH_ONE_STACK(p, source) \
      if ((ptr_t)(p) >= (ptr_t)GC_least_plausible_heap_addr 	\
	 && (ptr_t)(p) < (ptr_t)GC_greatest_plausible_heap_addr) {	\
	 PUSH_ONE_CHECKED_STACK(p, source);	\
      }
# endif
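/*
 * GC_PUSH_ONE_STACK is what an ambiguous-root scan (stack, registers,
 * static data) applies to each candidate word.  The loop below is an
 * illustrative sketch only, not part of this header; the function name
 * and bounds are hypothetical, but the shape mirrors how mark.c walks a
 * range of words that might contain pointers.
 */
#if 0
static void ex_scan_ambiguous_range(ptr_t bottom, ptr_t top)
{
    word *p = (word *)bottom;
    word *lim = (word *)top;

    while (p < lim) {
	ptr_t q = (ptr_t)(*p);          /* candidate; may just be an integer */
	GC_PUSH_ONE_STACK(q, p);        /* range-checks q, then pushes it    */
	p++;
    }
}
#endif /* 0 */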


/*
 * As above, but interior pointer recognition as for
 * normal heap pointers.
 */
# define GC_PUSH_ONE_HEAP(p,source) \
    FIXUP_POINTER(p); \
    if ((p) >= (ptr_t)GC_least_plausible_heap_addr 	\
	 && (p) < (ptr_t)GC_greatest_plausible_heap_addr) {	\
	    GC_mark_stack_top = GC_mark_and_push( \
			    (void *)(p), GC_mark_stack_top, \
			    GC_mark_stack_limit, (void * *)(source)); \
    }
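/*
 * GC_PUSH_ONE_HEAP is the per-word variant used for candidate pointers
 * fetched from the heap; the source argument records where the value was
 * found, so back pointers can be kept.  The loop below is an illustrative
 * sketch only, not part of this header; the function name and bounds are
 * hypothetical.
 */
#if 0
static void ex_trace_words(ptr_t obj, size_t sz_in_words)
{
    word *p = (word *)obj;
    size_t i;

    for (i = 0; i < sz_in_words; ++i) {
	ptr_t q = (ptr_t)p[i];          /* field that may hold a pointer    */
	GC_PUSH_ONE_HEAP(q, p + i);     /* push if plausibly a heap pointer */
    }
}
#endif /* 0 */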

/* Mark starting at mark stack entry top (incl.) down to	*/
/* mark stack entry bottom (incl.).  Stop after performing	*/
/* about one page worth of work.  Return the new mark stack	*/
/* top entry.							*/
mse * GC_mark_from(mse * top, mse * bottom, mse *limit);

#define MARK_FROM_MARK_STACK() \
	GC_mark_stack_top = GC_mark_from(GC_mark_stack_top, \
					 GC_mark_stack, \
					 GC_mark_stack + GC_mark_stack_size);

/*
 * Mark from one finalizable object using the specified
 * mark proc. May not mark the object pointed to by 
 * real_ptr. That is the job of the caller, if appropriate.
 * Note that this is called with the mutator running, but
 * with us holding the allocation lock.  This is safe only if the
 * mutator needs the allocation lock to reveal hidden pointers.
 * FIXME: Why do we need the GC_mark_state test below?
 */
# define GC_MARK_FO(real_ptr, mark_proc) \
{ \
    (*(mark_proc))(real_ptr); \
    while (!GC_mark_stack_empty()) MARK_FROM_MARK_STACK(); \
    if (GC_mark_state != MS_NONE) { \
        GC_set_mark_bit(real_ptr); \
        while (!GC_mark_some((ptr_t)0)) {} \
    } \
}

extern GC_bool GC_mark_stack_too_small;
				/* We need a larger mark stack.  May be	*/
				/* set by client supplied mark routines.*/

typedef int mark_state_t;	/* Current state of marking, as follows:*/
				/* Used to remember where we are during */
				/* concurrent marking.			*/

				/* We say something is dirty if it was	*/
				/* written since the last time we	*/
				/* retrieved dirty bits.  We say it's 	*/
				/* grungy if it was marked dirty in the	*/
				/* last set of bits we retrieved.	*/
				
				/* Invariant I: all roots and marked	*/
				/* objects p are either dirty, or point */
				/* to objects q that are either marked 	*/
				/* or a pointer to q appears in a range	*/
				/* on the mark stack.			*/

# define MS_NONE 0		/* No marking in progress. I holds.	*/
				/* Mark stack is empty.			*/

# define MS_PUSH_RESCUERS 1	/* Rescuing objects are currently 	*/
				/* being pushed.  I holds, except	*/
				/* that grungy roots may point to 	*/
				/* unmarked objects, as may marked	*/
				/* grungy objects above scan_ptr.	*/

# define MS_PUSH_UNCOLLECTABLE 2
				/* I holds, except that marked 		*/
				/* uncollectable objects above scan_ptr */
				/* may point to unmarked objects.	*/
				/* Roots may point to unmarked objects	*/

# define MS_ROOTS_PUSHED 3	/* I holds, mark stack may be nonempty  */

# define MS_PARTIALLY_INVALID 4	/* I may not hold, e.g. because of mark	*/
				/* stack overflow.  However marked heap	*/
				/* objects below scan_ptr point to	*/
				/* marked or stacked objects.		*/

# define MS_INVALID 5		/* I may not hold.			*/

extern mark_state_t GC_mark_state;
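/*
 * Taken together, the MS_* values form a small state machine that the
 * marker (cf. GC_mark_some in mark.c) steps through.  The switch below is
 * an illustrative sketch only, not part of this header; the case bodies
 * are placeholders, not the collector's real logic.
 */
#if 0
static void ex_mark_step(void)
{
    switch (GC_mark_state) {
      case MS_NONE:
	break;                  /* no marking in progress; I holds       */
      case MS_PUSH_RESCUERS:
      case MS_PUSH_UNCOLLECTABLE:
	/* Push more dirty or uncollectable blocks, advancing scan_ptr,  */
	/* draining part of the mark stack whenever it fills up.         */
	break;
      case MS_ROOTS_PUSHED:
	MARK_FROM_MARK_STACK(); /* everything pushed; just drain         */
	break;
      case MS_PARTIALLY_INVALID:
      case MS_INVALID:
	/* Invariant I may not hold: rebuild by clearing mark bits and   */
	/* re-pushing roots before draining again.                       */
	break;
    }
}
#endif /* 0 */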

#endif  /* GC_PMARK_H */
