⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 expr.c

📁 这是完整的gcc源代码
💻 C
📖 第 1 页 / 共 5 页
字号:
    {      /* Try the most limited insn first, because there's no point	 including more than one in the machine description unless	 the more limited one has some advantage.  */#ifdef HAVE_movstrqi      if (HAVE_movstrqi	  && GET_CODE (size) == CONST_INT	  && ((unsigned) INTVAL (size)	      < (1 << (GET_MODE_BITSIZE (QImode) - 1))))	{	  emit_insn (gen_movstrqi (x, y, size,				   gen_rtx (CONST_INT, VOIDmode, align)));	  return;	}#endif#ifdef HAVE_movstrhi      if (HAVE_movstrhi	  && GET_CODE (size) == CONST_INT	  && ((unsigned) INTVAL (size)	      < (1 << (GET_MODE_BITSIZE (HImode) - 1))))	{	  emit_insn (gen_movstrhi (x, y, size,				   gen_rtx (CONST_INT, VOIDmode, align)));	  return;	}#endif#ifdef HAVE_movstrsi      if (HAVE_movstrsi)	{	  emit_insn (gen_movstrsi (x, y, size,				   gen_rtx (CONST_INT, VOIDmode, align)));	  return;	}#endif#ifdef TARGET_MEM_FUNCTIONS      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "memcpy"), 0,			 VOIDmode, 3, XEXP (x, 0), Pmode,			 XEXP (y, 0), Pmode,			 size, Pmode);#else      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "bcopy"), 0,			 VOIDmode, 3, XEXP (y, 0), Pmode,			 XEXP (x, 0), Pmode,			 size, Pmode);#endif    }}/* Copy all or part of a value X into registers starting at REGNO.   The number of registers to be filled is NREGS.  
*/

/* Copy a value X (in memory, a REG, or a zero constant) into NREGS
   consecutive hard registers starting at REGNO, one SImode word at a
   time.  ALIGN is the known alignment of X in bits; when it is less
   than BITS_PER_WORD the memory reference may be unaligned and an
   unaligned-load pattern is used if the target provides one.  */

static void
move_block_to_reg (regno, x, nregs, align)
     int regno;
     rtx x;
     int nregs;
     int align;
{
  int i;

  /* Resolve any queued autoincrement address before we start copying.  */
  if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == QUEUED)
    XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);

  /* A nonzero floating constant must be forced into memory so it can be
     read out word by word.  */
  if (GET_CODE (x) == CONST_DOUBLE && x != dconst0_rtx)
    x = force_const_double_mem (x);

  for (i = 0; i < nregs; i++)
    {
      if (GET_CODE (x) == REG)
	emit_move_insn (gen_rtx (REG, SImode, regno + i),
			gen_rtx (SUBREG, SImode, x, i));
      else if (x == dconst0_rtx || x == const0_rtx)
	emit_move_insn (gen_rtx (REG, SImode, regno + i),
			const0_rtx);
      else
	{
	  int unaligned = (align < BITS_PER_WORD);
	  rtx to = gen_rtx (REG, SImode, regno + i);
	  rtx from = gen_rtx (MEM, SImode,
			      memory_address (SImode,
					      plus_constant (XEXP (x, 0),
							     i * GET_MODE_SIZE (SImode))));

#if (defined (HAVE_movsi_unaligned) || defined (STRICT_ALIGNMENT)) && defined (STACK_BOUNDARY)
	  /* If this is a reference to an auto variable, and the structure
	     is aligned appropriately, use normal aligned load.  */
	  if (STACK_BOUNDARY >= BITS_PER_WORD && unaligned)
	    {
	      int offset = 0;
	      rtx x_wo_const = eliminate_constant_term (XEXP (x, 0), &offset);

	      if ((offset & ((BITS_PER_WORD / BITS_PER_UNIT) - 1)) == 0
		  && (x_wo_const == frame_pointer_rtx
		      || x_wo_const == stack_pointer_rtx
		      || x_wo_const == arg_pointer_rtx))
		unaligned = FALSE;
	    }
#endif

#ifdef HAVE_movsi_unaligned
	  if (unaligned)
	    {
	      emit_insn (gen_movsi_unaligned (to, from));
	      /* NOTE(review): this `return' exits the whole function after
		 copying only the first word; when nregs > 1 the remaining
		 words are never moved.  Looks like it should be `continue'
		 (or fall through past the aligned move) -- TODO confirm
		 against the target ports that define HAVE_movsi_unaligned.  */
	      return;
	    }
#else /* not HAVE_movsi_unaligned */
#if 0 /* This gives spurious errors.  For example, a stack slot
	 whose offset is out of range turns into (mem (plus (reg) (reg)))
	 and gets this error.  */
#ifdef STRICT_ALIGNMENT
	  if (unaligned)
	    error ("Attempt to move unaligned structure to register");
#endif
#endif /* 0 */
#endif /* not HAVE_movsi_unaligned */

	  emit_move_insn (to, from);
	}
    }
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs, align)
     int regno;
     rtx x;
     int nregs;
     int align;
{
  int i;

  /* Resolve any queued autoincrement address before storing.  */
  if (GET_CODE (XEXP (x, 0)) == QUEUED)
    XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 1);

  for (i = 0; i < nregs; i++)
    {
      if (GET_CODE (x) == REG)
	emit_move_insn (gen_rtx (SUBREG, SImode, x, i),
			gen_rtx (REG, SImode, regno + i));
      else
	{
	  int unaligned = (align < BITS_PER_WORD);
	  rtx from = gen_rtx (REG, SImode, regno + i);
	  rtx to = gen_rtx (MEM, SImode,
			    memory_address (SImode,
					    plus_constant (XEXP (x, 0),
							   i * GET_MODE_SIZE (SImode))));

#if (defined(HAVE_movsi_unaligned) || defined(STRICT_ALIGNMENT)) && defined (STACK_BOUNDARY)
	  /* If this is a reference to an auto variable, and the structure
	     is aligned appropriately, use normal aligned load.  */
	  if (STACK_BOUNDARY >= BITS_PER_WORD && unaligned)
	    {
	      int offset = 0;
	      rtx x_wo_const = eliminate_constant_term (XEXP (x, 0), &offset);

	      if ((offset & ((BITS_PER_WORD / BITS_PER_UNIT) - 1)) == 0
		  && (x_wo_const == frame_pointer_rtx
		      || x_wo_const == stack_pointer_rtx
		      || x_wo_const == arg_pointer_rtx))
		unaligned = FALSE;
	    }
#endif

#ifdef HAVE_movsi_unaligned
	  if (unaligned)
	    {
	      extern rtx gen_movsi_unaligned ();
	      emit_insn (gen_movsi_unaligned (to, from));
	      /* NOTE(review): same issue as move_block_to_reg -- this
		 `return' stops after the first word when nregs > 1;
		 presumably it should be `continue'.  TODO confirm.  */
	      return;
	    }
#else /* not HAVE_movsi_unaligned */
#if 0 /* This is to stay in sync with move_block_to_reg.  
*/
#ifdef STRICT_ALIGNMENT
	  if (unaligned)
	    error ("Attempt to move unaligned structure to register");
#endif
#endif /* 0 */
#endif /* not HAVE_movsi_unaligned */

	  emit_move_insn (to, from);
	}
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

static void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;
  /* Emit a USE rtx per register so later passes keep them live.  */
  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, SImode, regno + i)));
}

/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.
   BLKmode objects are cleared with a library call (memset or bzero,
   depending on the target); anything else with a single move of zero.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "memset"), 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#else
      emit_library_call (gen_rtx (SYMBOL_REF, Pmode, "bzero"), 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 gen_rtx (CONST_INT, VOIDmode, size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.
   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    {
      int icode = (int) mov_optab->handlers[(int) mode].insn_code;

      /* If the move pattern won't accept Y as a constant operand,
	 spill the constant to memory and make sure the address is
	 valid for this mode.  */
      if (! (*insn_operand_predicate[icode][1]) (y, mode)
	  && (CONSTANT_P (y) || GET_CODE (y) == CONST_DOUBLE))
	{
	  y = force_const_mem (mode, y);
	  if (! memory_address_p (mode, XEXP (y, 0)))
	    y = gen_rtx (MEM, mode, memory_address (mode, XEXP (y, 0)));
	}

      return emit_insn (GEN_FCN (icode) (x, y));
    }
#if 0
  /* It turns out you get much better optimization (in cse and flow)
     if you define movdi and movdf instruction patterns
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= GET_MODE_SIZE (SImode))
    {
      register int count = GET_MODE_SIZE (mode) / GET_MODE_SIZE (SImode);
      register int i;
      if (GET_CODE (y) == CONST_DOUBLE && y != dconst0_rtx)
	y = force_const_double_mem (y);
      for (i = 0; i < count; i++)
	{
	  rtx x1, y1;
	  if (GET_CODE (x) == REG)
	    x1 = gen_rtx (SUBREG, SImode, x, i);
	  else
	    x1 = gen_rtx (MEM, SImode,
			  memory_address (SImode,
					  plus_constant (XEXP (x, 0),
							 i * GET_MODE_SIZE (SImode))));
	  if (GET_CODE (y) == REG)
	    y1 = gen_rtx (SUBREG, SImode, y, i);
	  else if (y == dconst0_rtx)
	    y1 = const0_rtx;
	  else
	    y1 = gen_rtx (MEM, SImode,
			  memory_address (SImode,
					  plus_constant (XEXP (y, 0),
							 i * GET_MODE_SIZE (SImode))));
	  emit_insn (gen_movsi (protect_from_queue (x1, 1), protect_from_queue (y1, 0)));
	}
    }
#endif
  else
    abort ();
}

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be stack_pointer_rtx.

   EXTRA is the number of bytes of padding to push in addition to the block.
   The padding is pushed "after" the specified size.

   The value we return does take account of STACK_POINTER_OFFSET.  */

rtx
push_block (size, extra)
     rtx size;
     int extra;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      /* NOTE(review): this inner `temp' shadows the outer one above;
	 harmless here since the outer is assigned only afterward, but a
	 different name would be clearer.  */
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab,
			     temp, gen_rtx (CONST_INT, VOIDmode, extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  /* Stack grows down: after the adjustment the stack pointer itself
     addresses the new block (plus any padding pushed below it).  */
  temp = stack_pointer_rtx;
  if (extra != 0)
    temp = plus_constant (temp, extra);
#else
  /* Stack grows up: the block starts SIZE bytes below the new stack
     pointer.  */
  temp = gen_rtx (PLUS, Pmode,
		  stack_pointer_rtx,
		  negate_rtx (Pmode, size));
  if (GET_CODE (size) != CONST_INT)
    temp = force_operand (temp, 0);
  if (extra != 0)
    temp = plus_constant (temp, -extra);
#endif

#ifdef STACK_POINTER_OFFSET
  temp = plus_constant (temp, STACK_POINTER_OFFSET);
#endif /* STACK_POINTER_OFFSET */

  return memory_address (QImode, temp);
}

/* Return a pre-decrement (or pre-increment, if the stack grows upward)
   of the stack pointer, for use as the address of a push destination.  */

static rtx
gen_push_operand ()
{
  return gen_rtx (
#ifdef STACK_GROWS_DOWNWARD
		  PRE_DEC,
#else
		  PRE_INC,
#endif
		  Pmode,
		  stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL is nonzero, then copy that many of the first words
   of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   Within the function, we set EXTRA to zero once the padding is done,
   to avoid padding twice.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  
We use indexing off there   to store the arg.  On machines with push insns, ARGS_ADDR is 0.   ARGS_SO_FAR is the size of args previously pushed for this call.  */static voidemit_push_insn (x, mode, size, align, partial, reg, extra, args_addr, args_so_far)     register rtx x;     enum machine_mode mode;     rtx size;     int align;     int partial;     rtx reg;     int extra;     rtx args_addr;     rtx args_so_far;{  rtx xinner;  enum direction stack_direction#ifdef STACK_GROWS_DOWNWARD    = downward;#else    = upward;#endif  /* Decide where to pad the argument: `downward' for below,     `upward' for above, or `none' for don't pad it.     Default is below for small data on big-endian machines; else above.  */  enum direction where_pad = FUNCTION_ARG_PADDING (mode, size);  xinner = x = protect_from_queue (x, 0);  if (extra)    {      if (args_addr == 0)	{	  /* Push padding now if padding above and stack grows down,	     or if padding below and stack grows up.  */	  if (where_pad != none && where_pad != stack_direction)	    {	      anti_adjust_stack (gen_rtx (CONST_INT, VOIDmode, extra));	      extra = 0;	    }	}      else	{	  /* If space already allocated, just adjust the address we use.  */	  if (where_pad == downward)	    {	      args_so_far = plus_constant (args_so_far, extra);	    }	  /* If padding comes after a space already allocated,	     there is nothing to do.  */	  extra = 0;	}    }  if (mode == BLKmode)    {      /* Copy a block into the stack, entirely or partially.  */      register rtx temp;      int used = partial * UNITS_PER_WORD;      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);      int skip;            if (size == 0)	abort ();      used -= offset;      /* USED is now the # of bytes we need not copy to the stack	 because registers will take care of them.  */

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -