
📄 i386.c

📁 GCC compiler source code
💻 C
📖 Page 1 of 5
    }

  if (max_tmps == 0)
    fatal_insn ("No scratch registers were found to do memory->memory moves",
		insn);

  if ((length & 1) != 0)
    {
      if (qi_tmp == 0)
	fatal_insn ("No byte register found when moving odd # of bytes.",
		    insn);
    }

  while (length > 1)
    {
      for (num_tmps = 0; num_tmps < max_tmps; num_tmps++)
	{
	  if (length >= 4)
	    {
	      tmp_info[num_tmps].load    = AS2(mov%L0,%1,%2);
	      tmp_info[num_tmps].store   = AS2(mov%L0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);
	      offset += 4;
	      length -= 4;
	    }

	  else if (length >= 2)
	    {
	      tmp_info[num_tmps].load    = AS2(mov%W0,%1,%2);
	      tmp_info[num_tmps].store   = AS2(mov%W0,%2,%0);
	      tmp_info[num_tmps].xops[0]
		= adj_offsettable_operand (dest, offset);
	      tmp_info[num_tmps].xops[1]
		= adj_offsettable_operand (src, offset);
	      offset += 2;
	      length -= 2;
	    }

	  else
	    break;
	}

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].load, tmp_info[i].xops);

      for (i = 0; i < num_tmps; i++)
	output_asm_insn (tmp_info[i].store, tmp_info[i].xops);
    }

  if (length == 1)
    {
      xops[0] = adj_offsettable_operand (dest, offset);
      xops[1] = adj_offsettable_operand (src, offset);
      xops[2] = qi_tmp;
      output_asm_insn (AS2(mov%B0,%1,%2), xops);
      output_asm_insn (AS2(mov%B0,%2,%0), xops);
    }

  return "";
}

int
standard_80387_constant_p (x)
     rtx x;
{
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  REAL_VALUE_TYPE d;
  jmp_buf handler;
  int is0, is1;

  if (setjmp (handler))
    return 0;

  set_float_handler (handler);
  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
  is0 = REAL_VALUES_EQUAL (d, dconst0) && !REAL_VALUE_MINUS_ZERO (d);
  is1 = REAL_VALUES_EQUAL (d, dconst1);
  set_float_handler (NULL_PTR);

  if (is0)
    return 1;

  if (is1)
    return 2;

  /* Note that on the 80387, other constants, such as pi,
     are much slower to load as standard constants
     than to load from doubles in memory!  */
#endif

  return 0;
}

char *
output_move_const_single (operands)
     rtx *operands;
{
  if (FP_REG_P (operands[0]))
    {
      int conval = standard_80387_constant_p (operands[1]);

      if (conval == 1)
	return "fldz";

      if (conval == 2)
	return "fld1";
    }

  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    {
      REAL_VALUE_TYPE r; long l;

      if (GET_MODE (operands[1]) == XFmode)
	abort ();

      REAL_VALUE_FROM_CONST_DOUBLE (r, operands[1]);
      REAL_VALUE_TO_TARGET_SINGLE (r, l);
      operands[1] = GEN_INT (l);
    }

  return singlemove_string (operands);
}

/* Returns 1 if OP is either a symbol reference or a sum of a symbol
   reference and a constant.  */

int
symbolic_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  switch (GET_CODE (op))
    {
    case SYMBOL_REF:
    case LABEL_REF:
      return 1;

    case CONST:
      op = XEXP (op, 0);
      return ((GET_CODE (XEXP (op, 0)) == SYMBOL_REF
	       || GET_CODE (XEXP (op, 0)) == LABEL_REF)
	      && GET_CODE (XEXP (op, 1)) == CONST_INT);

    default:
      return 0;
    }
}

/* Test for a valid operand for a call instruction.
   Don't allow the arg pointer register or virtual regs
   since they may change into reg + const, which the patterns
   can't handle yet.  */

int
call_insn_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == MEM
      && ((CONSTANT_ADDRESS_P (XEXP (op, 0))
	   /* This makes a difference for PIC.  */
	   && general_operand (XEXP (op, 0), Pmode))
	  || (GET_CODE (XEXP (op, 0)) == REG
	      && XEXP (op, 0) != arg_pointer_rtx
	      && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
		    && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
    return 1;

  return 0;
}

/* Like call_insn_operand but allow (mem (symbol_ref ...))
   even if pic.  */

int
expander_call_insn_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) == MEM
      && (CONSTANT_ADDRESS_P (XEXP (op, 0))
	  || (GET_CODE (XEXP (op, 0)) == REG
	      && XEXP (op, 0) != arg_pointer_rtx
	      && ! (REGNO (XEXP (op, 0)) >= FIRST_PSEUDO_REGISTER
		    && REGNO (XEXP (op, 0)) <= LAST_VIRTUAL_REGISTER))))
    return 1;

  return 0;
}

/* Return 1 if OP is a comparison operator that can use the condition code
   generated by an arithmetic operation. */

int
arithmetic_comparison_operator (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  enum rtx_code code;

  if (mode != VOIDmode && mode != GET_MODE (op))
    return 0;

  code = GET_CODE (op);
  if (GET_RTX_CLASS (code) != '<')
    return 0;

  return (code != GT && code != LE);
}

/* Returns 1 if OP contains a symbol reference */

int
symbolic_reference_mentioned_p (op)
     rtx op;
{
  register char *fmt;
  register int i;

  if (GET_CODE (op) == SYMBOL_REF || GET_CODE (op) == LABEL_REF)
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (op));
  for (i = GET_RTX_LENGTH (GET_CODE (op)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
	{
	  register int j;

	  for (j = XVECLEN (op, i) - 1; j >= 0; j--)
	    if (symbolic_reference_mentioned_p (XVECEXP (op, i, j)))
	      return 1;
	}

      else if (fmt[i] == 'e' && symbolic_reference_mentioned_p (XEXP (op, i)))
	return 1;
    }

  return 0;
}

/* Attempt to expand a binary operator.  Make the expansion closer to the
   actual machine, then just general_operand, which will allow 3 separate
   memory references (one output, two input) in a single insn.  Return
   whether the insn fails, or succeeds.  */

int
ix86_expand_binary_operator (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[];
{
  rtx insn;
  int i;
  int modified;

  /* Recognize <var1> = <value> <op> <var1> for commutative operators */
  if (GET_RTX_CLASS (code) == 'c'
      && (rtx_equal_p (operands[0], operands[2])
	  || immediate_operand (operands[1], mode)))
    {
      rtx temp = operands[1];
      operands[1] = operands[2];
      operands[2] = temp;
    }

  /* If optimizing, copy to regs to improve CSE */
  if (TARGET_PSEUDO && optimize
      && ((reload_in_progress | reload_completed) == 0))
    {
      if (GET_CODE (operands[1]) == MEM
	  && ! rtx_equal_p (operands[0], operands[1]))
	operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

      if (GET_CODE (operands[2]) == MEM)
	operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);

      if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	{
	  rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	  emit_move_insn (temp, operands[1]);
	  operands[1] = temp;
	  return TRUE;
	}
    }

  if (!ix86_binary_operator_ok (code, mode, operands))
    {
      /* If not optimizing, try to make a valid insn (optimize code
	 previously did this above to improve chances of CSE) */

      if ((! TARGET_PSEUDO || !optimize)
	  && ((reload_in_progress | reload_completed) == 0)
	  && (GET_CODE (operands[1]) == MEM || GET_CODE (operands[2]) == MEM))
	{
	  modified = FALSE;
	  if (GET_CODE (operands[1]) == MEM
	      && ! rtx_equal_p (operands[0], operands[1]))
	    {
	      operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[2]) == MEM)
	    {
	      operands[2] = force_reg (GET_MODE (operands[2]), operands[2]);
	      modified = TRUE;
	    }

	  if (GET_CODE (operands[1]) == CONST_INT && code == MINUS)
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (operands[0]));

	      emit_move_insn (temp, operands[1]);
	      operands[1] = temp;
	      return TRUE;
	    }

	  if (modified && ! ix86_binary_operator_ok (code, mode, operands))
	    return FALSE;
	}
      else
	return FALSE;
    }

  return TRUE;
}

/* Return TRUE or FALSE depending on whether the binary operator meets the
   appropriate constraints.  */

int
ix86_binary_operator_ok (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[3];
{
  return (GET_CODE (operands[1]) != MEM || GET_CODE (operands[2]) != MEM)
    && (GET_CODE (operands[1]) != CONST_INT || GET_RTX_CLASS (code) == 'c');
}

/* Attempt to expand a unary operator.  Make the expansion closer to the
   actual machine, then just general_operand, which will allow 2 separate
   memory references (one output, one input) in a single insn.  Return
   whether the insn fails, or succeeds.  */

int
ix86_expand_unary_operator (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[];
{
  rtx insn;

  /* If optimizing, copy to regs to improve CSE */
  if (TARGET_PSEUDO
      && optimize
      && ((reload_in_progress | reload_completed) == 0)
      && GET_CODE (operands[1]) == MEM)
    operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

  if (! ix86_unary_operator_ok (code, mode, operands))
    {
      if ((! TARGET_PSEUDO || optimize == 0)
	  && ((reload_in_progress | reload_completed) == 0)
	  && GET_CODE (operands[1]) == MEM)
	{
	  operands[1] = force_reg (GET_MODE (operands[1]), operands[1]);

	  if (! ix86_unary_operator_ok (code, mode, operands))
	    return FALSE;
	}
      else
	return FALSE;
    }

  return TRUE;
}

/* Return TRUE or FALSE depending on whether the unary operator meets the
   appropriate constraints.  */

int
ix86_unary_operator_ok (code, mode, operands)
     enum rtx_code code;
     enum machine_mode mode;
     rtx operands[2];
{
  return TRUE;
}

static rtx pic_label_rtx;
static char pic_label_name [256];
static int pic_label_no = 0;

/* This function generates code for -fpic that loads %ebx with
   with the return address of the caller and then returns.  */

void
asm_output_function_prefix (file, name)
     FILE *file;
     char *name;
{
  rtx xops[2];
  int pic_reg_used = flag_pic && (current_function_uses_pic_offset_table
				  || current_function_uses_const_pool);

  xops[0] = pic_offset_table_rtx;
  xops[1] = stack_pointer_rtx;

  /* Deep branch prediction favors having a return for every call. */
  if (pic_reg_used && TARGET_DEEP_BRANCH_PREDICTION)
    {
      tree prologue_node;

      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  sprintf (pic_label_name, "LPR%d", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      prologue_node = make_node (FUNCTION_DECL);
      DECL_RESULT (prologue_node) = 0;
#ifdef ASM_DECLARE_FUNCTION_NAME
      ASM_DECLARE_FUNCTION_NAME (file, pic_label_name, prologue_node);
#endif
      output_asm_insn ("movl (%1),%0", xops);
      output_asm_insn ("ret", xops);
    }
}

/* Generate the assembly code for function entry.
   FILE is an stdio stream to output the code to.
   SIZE is an int: how many units of temporary storage to allocate. */

void
function_prologue (file, size)
     FILE *file;
     int size;
{
  if (TARGET_SCHEDULE_PROLOGUE)
    {
      pic_label_rtx = 0;
      return;
    }

  ix86_prologue (0);
}

/* Expand the prologue into a bunch of separate insns. */

void
ix86_expand_prologue ()
{
  if (! TARGET_SCHEDULE_PROLOGUE)
    return;

  ix86_prologue (1);
}

void
load_pic_register (do_rtl)
     int do_rtl;
{
  rtx xops[4];

  if (TARGET_DEEP_BRANCH_PREDICTION)
    {
      xops[0] = pic_offset_table_rtx;

      if (pic_label_rtx == 0)
	{
	  pic_label_rtx = gen_label_rtx ();
	  sprintf (pic_label_name, "LPR%d", pic_label_no++);
	  LABEL_NAME (pic_label_rtx) = pic_label_name;
	}

      xops[1] = gen_rtx (MEM, QImode,
			 gen_rtx (SYMBOL_REF, Pmode,
				  LABEL_NAME (pic_label_rtx)));

      if (do_rtl)
	{
	  emit_insn (gen_prologue_get_pc (xops[0], xops[1]));
	  emit_insn (gen_prologue_set_got (xops[0],
					   gen_rtx (SYMBOL_REF, Pmode,
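
Note: the tail of the memory-to-memory move routine at the top of this page walks the remaining byte count through scratch registers in 4-byte chunks (mov%L), then a 2-byte chunk (mov%W), and finally a single odd trailing byte (mov%B) through a byte-capable register. The following standalone C sketch mirrors only that chunking policy; it is illustrative, not GCC code (the function name copy_in_chunks is made up here, and memcpy stands in for the scratch-register loads and stores).

#include <stdio.h>
#include <string.h>

/* Illustrative only: mirror the 4/2/1-byte chunking the routine above uses
   when it schedules loads and stores through scratch registers.  */
static void
copy_in_chunks (unsigned char *dest, const unsigned char *src, size_t length)
{
  size_t offset = 0;

  while (length >= 4)			/* 32-bit moves, like mov%L */
    {
      memcpy (dest + offset, src + offset, 4);
      offset += 4, length -= 4;
    }

  if (length >= 2)			/* one 16-bit move, like mov%W */
    {
      memcpy (dest + offset, src + offset, 2);
      offset += 2, length -= 2;
    }

  if (length == 1)			/* odd trailing byte, like mov%B */
    dest[offset] = src[offset];
}

int
main (void)
{
  const unsigned char src[7] = "abcdef";
  unsigned char dest[7] = { 0 };

  copy_in_chunks (dest, src, sizeof src);
  printf ("%s\n", (const char *) dest);	/* prints "abcdef" */
  return 0;
}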
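Note: standard_80387_constant_p above returns 1 only for +0.0 and 2 only for 1.0, because fldz and fld1 are the only constant loads that beat reading a double from memory on the 80387, and output_move_const_single maps those return values to the instruction names. The sketch below illustrates that mapping with a plain double standing in for the CONST_DOUBLE rtx the real code inspects; the helper name fp_load_insn is invented for illustration.

#include <math.h>
#include <stdio.h>

/* Illustrative stand-in for standard_80387_constant_p /
   output_move_const_single: classify a value the 387 can load directly.  */
static const char *
fp_load_insn (double d)
{
  if (d == 0.0 && !signbit (d))	/* +0.0 only; -0.0 does not qualify for fldz */
    return "fldz";
  if (d == 1.0)
    return "fld1";
  return NULL;			/* anything else: load it from memory */
}

int
main (void)
{
  printf ("%s\n", fp_load_insn (0.0));			/* fldz */
  printf ("%s\n", fp_load_insn (1.0));			/* fld1 */
  printf ("%d\n", fp_load_insn (3.14159) == NULL);	/* 1 */
  return 0;
}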
