
alpha.c
gcc 3.2.1 source code (C), page 1 of 5
  for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
  return x;
}

static int
split_small_symbolic_operand_1 (px, data)
     rtx *px;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  if (small_symbolic_operand (x, Pmode))
    {
      x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
      *px = x;
      return -1;
    }

  return 0;
}

/* Try a machine-dependent way of reloading an illegitimate address
   operand.  If we find one, push the reload and return the new rtx.  */

rtx
alpha_legitimize_reload_address (x, mode, opnum, type, ind_levels)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, type);
      return x;
    }

  /* We wish to handle large displacements off a base register by
     splitting the addend across an ldah and the mem insn.  This
     cuts the number of extra insns needed from 3 to 1.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        return NULL_RTX;

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */
      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, type);
      return x;
    }

  return NULL_RTX;
}
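/* [Editor's illustration -- not part of gcc 3.2.1 alpha.c.  A minimal
   standalone sketch of the high/low split used above: the expression
   ((val & 0xffff) ^ 0x8000) - 0x8000 sign-extends the low 16 bits, so
   that val == high + low, with LOW fitting the signed 16-bit
   displacement of an lda and HIGH a multiple of 0x10000 suitable for
   an ldah.  The sample value 0x12348765 is arbitrary.]  */

#include <assert.h>
#include <stdio.h>

int
main (void)
{
  long long val = 0x12348765LL;
  long long low = ((val & 0xffff) ^ 0x8000) - 0x8000;
  long long high = (((val - low) & 0xffffffffLL) ^ 0x80000000LL)
                   - 0x80000000LL;

  assert (low >= -0x8000 && low < 0x8000);  /* fits lda's 16-bit field */
  assert ((high & 0xffff) == 0);            /* ldah supplies bits 16-31 */
  assert (high + low == val);               /* the 32-bit overflow check */
  printf ("val=%#llx high=%#llx low=%lld\n", val, high, low);
  return 0;
}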
/* REF is an alignable memory location.  Place an aligned SImode
   reference into *PALIGNED_MEM and the number of bits to shift into
   *PBITNUM.  SCRATCH is a free register for use in reloading out
   of range stack slots.  */

void
get_aligned_mem (ref, paligned_mem, pbitnum)
     rtx ref;
     rtx *paligned_mem, *pbitnum;
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) != MEM)
    abort ();

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      base = find_replacement (&XEXP (ref, 0));

      if (! memory_address_p (GET_MODE (ref), base))
        abort ();
    }
  else
    {
      base = XEXP (ref, 0);
    }

  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  *paligned_mem
    = widen_memory_access (ref, SImode, (offset & ~3) - offset);

  if (WORDS_BIG_ENDIAN)
    *pbitnum = GEN_INT (32 - (GET_MODE_BITSIZE (GET_MODE (ref))
                              + (offset & 3) * 8));
  else
    *pbitnum = GEN_INT ((offset & 3) * 8);
}

/* Similar, but just get the address.  Handle the two reload cases.
   Add EXTRA_OFFSET to the address we return.  */

rtx
get_unaligned_address (ref, extra_offset)
     rtx ref;
     int extra_offset;
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) != MEM)
    abort ();

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      base = find_replacement (&XEXP (ref, 0));

      if (! memory_address_p (GET_MODE (ref), base))
        abort ();
    }
  else
    {
      base = XEXP (ref, 0);
    }

  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  return plus_constant (base, offset + extra_offset);
}

/* On the Alpha, all (non-symbolic) constants except zero go into
   a floating-point register via memory.  Note that we cannot
   return anything that is not a subset of CLASS, and that some
   symbolic constants cannot be dropped to memory.  */

enum reg_class
alpha_preferred_reload_class(x, class)
     rtx x;
     enum reg_class class;
{
  /* Zero is present in any register class.  */
  if (x == CONST0_RTX (GET_MODE (x)))
    return class;

  /* These sorts of constants we can easily drop to memory.  */
  if (GET_CODE (x) == CONST_INT || GET_CODE (x) == CONST_DOUBLE)
    {
      if (class == FLOAT_REGS)
        return NO_REGS;
      if (class == ALL_REGS)
        return GENERAL_REGS;
      return class;
    }

  /* All other kinds of constants should not (and in the case of HIGH
     cannot) be dropped to memory -- instead we use a GENERAL_REGS
     secondary reload.  */
  if (CONSTANT_P (x))
    return (class == ALL_REGS ? GENERAL_REGS : class);

  return class;
}

/* Loading and storing HImode or QImode values to and from memory
   usually requires a scratch register.  The exceptions are loading
   QImode and HImode from an aligned address to a general register
   unless byte instructions are permitted.

   We also cannot load an unaligned address or a paradoxical SUBREG
   into an FP register.

   We also cannot do integral arithmetic into FP regs, as might result
   from register elimination into a DImode fp register.  */

enum reg_class
secondary_reload_class (class, mode, x, in)
     enum reg_class class;
     enum machine_mode mode;
     rtx x;
     int in;
{
  if ((mode == QImode || mode == HImode) && ! TARGET_BWX)
    {
      if (GET_CODE (x) == MEM
          || (GET_CODE (x) == REG && REGNO (x) >= FIRST_PSEUDO_REGISTER)
          || (GET_CODE (x) == SUBREG
              && (GET_CODE (SUBREG_REG (x)) == MEM
                  || (GET_CODE (SUBREG_REG (x)) == REG
                      && REGNO (SUBREG_REG (x)) >= FIRST_PSEUDO_REGISTER))))
        {
          if (!in || !aligned_memory_operand(x, mode))
            return GENERAL_REGS;
        }
    }

  if (class == FLOAT_REGS)
    {
      if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == AND)
        return GENERAL_REGS;

      if (GET_CODE (x) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (x))
              > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
        return GENERAL_REGS;

      if (in && INTEGRAL_MODE_P (mode)
          && ! (memory_operand (x, mode) || x == const0_rtx))
        return GENERAL_REGS;
    }

  return NO_REGS;
}
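/* [Editor's illustration -- not part of gcc 3.2.1 alpha.c.  A host-side
   sketch of the arithmetic in get_aligned_mem above: a byte at OFFSET
   lives in the aligned word at (OFFSET & ~3), and in the
   !WORDS_BIG_ENDIAN case it sits (OFFSET & 3) * 8 bits up, which is the
   shift a pre-BWX Alpha applies after the aligned SImode load.  Assumes
   a little-endian host; the helper name is made up.]  */

#include <assert.h>
#include <string.h>

static unsigned char
load_byte_via_aligned_word (const unsigned char *base, long offset)
{
  unsigned int word;
  int bitnum = (int) (offset & 3) * 8;       /* *pbitnum */

  memcpy (&word, base + (offset & ~3L), 4);  /* the aligned SImode load */
  return (unsigned char) (word >> bitnum);   /* extract, as extbl would */
}

int
main (void)
{
  unsigned char buf[8] = { 0x10, 0x32, 0x54, 0x76, 0x98, 0xba, 0xdc, 0xfe };
  long i;

  for (i = 0; i < 8; i++)
    assert (load_byte_via_aligned_word (buf, i) == buf[i]);
  return 0;
}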
/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */

static void
alpha_set_memflags_1 (x, in_struct_p, volatile_p, unchanging_p)
     rtx x;
     int in_struct_p, volatile_p, unchanging_p;
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        alpha_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p,
                              unchanging_p);
      break;

    case INSN:
      alpha_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p,
                            unchanging_p);
      break;

    case SET:
      alpha_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p,
                            unchanging_p);
      alpha_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p,
                            unchanging_p);
      break;

    case MEM:
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      RTX_UNCHANGING_P (x) = unchanging_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
         produced by the AND interferes.  Given that two-byte quantities
         are the only thing we would be able to differentiate anyway,
         there does not seem to be any point in convoluting the early
         out of the alias check.  */
      break;

    default:
      break;
    }
}

/* Given INSN, which is either an INSN or a SEQUENCE generated to
   perform a memory operation, look for any MEMs in either a SET_DEST or
   a SET_SRC and copy the in-struct, unchanging, and volatile flags from
   REF into each of the MEMs found.  If REF is not a MEM, don't do
   anything.  */

void
alpha_set_memflags (insn, ref)
     rtx insn;
     rtx ref;
{
  int in_struct_p, volatile_p, unchanging_p;

  if (GET_CODE (ref) != MEM)
    return;

  in_struct_p = MEM_IN_STRUCT_P (ref);
  volatile_p = MEM_VOLATILE_P (ref);
  unchanging_p = RTX_UNCHANGING_P (ref);

  /* This is only called from alpha.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (! in_struct_p && ! volatile_p && ! unchanging_p)
    return;

  alpha_set_memflags_1 (insn, in_struct_p, volatile_p, unchanging_p);
}

/* Try to output insns to set TARGET equal to the constant C if it can be
   done in less than N insns.  Do all computations in MODE.  Returns the
   place where the output has been placed if it can be done and the insns
   have been emitted.  If it would take more than N insns, zero is
   returned and no insns are emitted.  */

rtx
alpha_emit_set_const (target, mode, c, n)
     rtx target;
     enum machine_mode mode;
     HOST_WIDE_INT c;
     int n;
{
  rtx result = 0;
  rtx orig_target = target;
  int i;

  /* If we can't make any pseudos, TARGET is an SImode hard register, and
     we can't load this constant in one insn, do this in DImode.  */
  if (no_new_pseudos && mode == SImode
      && GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER
      && (result = alpha_emit_set_const_1 (target, mode, c, 1)) == 0)
    {
      target = gen_lowpart (DImode, target);
      mode = DImode;
    }

  /* Try 1 insn, then 2, then up to N.  */
  for (i = 1; i <= n; i++)
    {
      result = alpha_emit_set_const_1 (target, mode, c, i);
      if (result)
        {
          rtx insn = get_last_insn ();
          rtx set = single_set (insn);
          if (! CONSTANT_P (SET_SRC (set)))
            set_unique_reg_note (get_last_insn (), REG_EQUAL, GEN_INT (c));
          break;
        }
    }

  /* Allow for the case where we changed the mode of TARGET.  */
  if (result == target)
    result = orig_target;

  return result;
}
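/* [Editor's illustration -- not part of gcc 3.2.1 alpha.c.  Because
   alpha_emit_set_const tries budgets of 1, 2, ... N insns in order, the
   first success is a minimal sequence.  A host-side cost model of the
   sign-extended 32-bit case that alpha_emit_set_const_1 (below) handles
   first: one lda or ldah, an ldah+lda pair, or three insns when the
   positive-constant adjustment fires.  The function name is made up.]  */

#include <assert.h>

static int
insns_for_32bit_const (long long c)
{
  long long low = ((c & 0xffff) ^ 0x8000) - 0x8000;
  long long high = ((((c - low) >> 16) & 0xffff) ^ 0x8000) - 0x8000;
  int extra = (high & 0x8000) != 0 && c >= 0;  /* needs a second ldah */

  if (c == low || (low == 0 && !extra))
    return 1;                                  /* single lda or ldah */
  return 2 + extra;                            /* ldah [+ ldah] + lda */
}

int
main (void)
{
  assert (insns_for_32bit_const (42) == 1);            /* lda */
  assert (insns_for_32bit_const (0x12340000LL) == 1);  /* ldah */
  assert (insns_for_32bit_const (0x12348765LL) == 2);  /* ldah + lda */
  assert (insns_for_32bit_const (0x7fff8000LL) == 3);  /* ldah twice + lda */
  return 0;
}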
/* Internal routine for the above to check for N or below insns.  */

static rtx
alpha_emit_set_const_1 (target, mode, c, n)
     rtx target;
     enum machine_mode mode;
     HOST_WIDE_INT c;
     int n;
{
  HOST_WIDE_INT new;
  int i, bits;
  /* Use a pseudo if highly optimizing and still generating RTL.  */
  rtx subtarget
    = (flag_expensive_optimizations && !no_new_pseudos ? 0 : target);
  rtx temp, insn;

  /* If this is a sign-extended 32-bit constant, we can do this in at most
     three insns, so do it if we have enough insns left.  We always have
     a sign-extended 32-bit constant when compiling on a narrow machine.  */
  if (HOST_BITS_PER_WIDE_INT != 64
      || c >> 31 == -1 || c >> 31 == 0)
    {
      HOST_WIDE_INT low = ((c & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT tmp1 = c - low;
      HOST_WIDE_INT high = (((tmp1 >> 16) & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT extra = 0;

      /* If HIGH will be interpreted as negative but the constant is
         positive, we must adjust it to do two ldah insns.  */
      if ((high & 0x8000) != 0 && c >= 0)
        {
          extra = 0x4000;
          tmp1 -= 0x40000000;
          high = ((tmp1 >> 16) & 0xffff) - 2 * ((tmp1 >> 16) & 0x8000);
        }
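      /* [Editor's note, not in the original source: a worked example of
         the adjustment above.  For c = 0x7fff8000, LOW = -0x8000 and the
         first HIGH computes to -0x8000, whose ldah would sign-extend to
         0xffffffff80000000 even though C is positive.  Setting EXTRA to
         0x4000 recomputes HIGH as 0x4000, and the emitted sequence
         becomes ldah 0x4000; ldah 0x4000; lda -0x8000, since
         0x40000000 + 0x40000000 - 0x8000 == 0x7fff8000.]  */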
      if (c == low || (low == 0 && extra == 0))
        {
          /* We used to use copy_to_suggested_reg (GEN_INT (c), target, mode)
             but that meant that we can't handle INT_MIN on 32-bit machines
             (like NT/Alpha), because we recurse indefinitely through
             emit_move_insn to gen_movdi.  So instead, since we know exactly
             what we want, create it explicitly.  */
          if (target == NULL)
            target = gen_reg_rtx (mode);
          emit_insn (gen_rtx_SET (VOIDmode, target, GEN_INT (c)));
          return target;
        }
      else if (n >= 2 + (extra != 0))
        {
          temp = copy_to_suggested_reg (GEN_INT (high << 16), subtarget, mode);

          /* As of 2002-02-23, addsi3 is only available when not optimizing.
             This means that if we go through expand_binop, we'll try to
             generate extensions, etc, which will require new pseudos, which
             will fail during some split phases.  The SImode add patterns
             still exist, but are not named.  So build the insns by hand.  */
          if (extra != 0)
            {
              if (! subtarget)
                subtarget = gen_reg_rtx (mode);
              insn = gen_rtx_PLUS (mode, temp, GEN_INT (extra << 16));
              insn = gen_rtx_SET (VOIDmode, subtarget, insn);
              emit_insn (insn);
              temp = subtarget;
            }

          if (target == NULL)
            target = gen_reg_rtx (mode);
          insn = gen_rtx_PLUS (mode, temp, GEN_INT (low));
          insn = gen_rtx_SET (VOIDmode, target, insn);
          emit_insn (insn);
          return target;
        }
    }

  /* If we couldn't do it that way, try some other methods.  But if we have
     no instructions left, don't bother.  Likewise, if this is SImode and
     we can't make pseudos, we can't do anything since the expand_binop
     and expand_unop calls will widen and try to make pseudos.  */
  if (n == 1 || (mode == SImode && no_new_pseudos))
    return 0;

  /* Next, see if we can load a related constant and then shift and possibly
     negate it to get the constant we want.  Try this once with each
     increasing number of insns.  */
  for (i = 1; i < n; i++)
    {
      /* First, see if minus some low bits, we've an easy load of
         high bits.  */
      new = ((c & 0xffff) ^ 0x8000) - 0x8000;
      if (new != 0
          && (temp = alpha_emit_set_const (subtarget, mode, c - new, i)) != 0)
        return expand_binop (mode, add_optab, temp, GEN_INT (new),
                             target, 0, OPTAB_WIDEN);

      /* Next try complementing.  */
      if ((temp = alpha_emit_set_const (subtarget, mode, ~ c, i)) != 0)
        return expand_unop (mode, one_cmpl_optab, temp, target, 0);

      /* Next try to form a constant and do a left shift.  We can do this
         if some low-order bits are zero; the exact_log2 call below tells
         us that information
