📄 integrate.c
字号:
/* (Tail of save_constants; the function's opening, its PX/X/I/J locals
   and the `again' label are above this chunk.)

   If this is a CONST_DOUBLE, don't try to fix things up in
   CONST_DOUBLE_MEM, because this is an infinite recursion.  */
if (GET_CODE (x) == CONST_DOUBLE)
  return;
else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	 && CONSTANT_POOL_ADDRESS_P (XEXP (x,0)))
  {
    /* A MEM whose address is a constant-pool SYMBOL_REF: replace the MEM
       with a CONST carrying the pool entry's value and mode, marked
       RTX_INTEGRATED_P so copy_for_inline can later turn it back into a
       pool reference.  */
    enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
    rtx new = gen_rtx (CONST, const_mode, get_pool_constant (XEXP (x, 0)));
    RTX_INTEGRATED_P (new) = 1;

    /* If the MEM was in a different mode than the constant (perhaps we
       were only looking at the low-order part), surround it with a
       SUBREG so we can save both modes.  */
    if (GET_MODE (x) != const_mode)
      {
	new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
	RTX_INTEGRATED_P (new) = 1;
      }

    *px = new;
    /* The saved constant may itself contain pool references; recurse
       into it.  */
    save_constants (&XEXP (*px, 0));
  }
else if (GET_CODE (x) == SYMBOL_REF && CONSTANT_POOL_ADDRESS_P (x))
  {
    /* A bare constant-pool address: save it as an ADDRESS rtx recording
       the pool entry's mode and value, also marked RTX_INTEGRATED_P.  */
    *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
    save_constants (&XEXP (*px, 0));
    RTX_INTEGRATED_P (*px) = 1;
  }
else
  {
    /* Otherwise walk every subexpression, driven by the rtx code's
       format string.  */
    char *fmt = GET_RTX_FORMAT (GET_CODE (x));
    int len = GET_RTX_LENGTH (GET_CODE (x));

    for (i = len-1; i >= 0; i--)
      {
	switch (fmt[i])
	  {
	  case 'E':		/* A vector of expressions.  */
	    for (j = 0; j < XVECLEN (x, i); j++)
	      save_constants (&XVECEXP (x, i, j));
	    break;

	  case 'e':		/* A single expression.  */
	    if (XEXP (x, i) == 0)
	      continue;
	    if (i == 0)
	      {
		/* Hack tail-recursion here: operand 0 is processed last
		   (loop runs downward), so loop instead of recursing to
		   bound stack depth on long rtx chains.  */
		px = &XEXP (x, 0);
		goto again;
	      }
	    save_constants (&XEXP (x, i));
	    break;
	  }
      }
  }
}

/* Note whether a parameter is modified or not.  If REG is a pseudo-reg
   that holds an incoming parameter (it has an entry in parmdecl_map) and
   the in_nonparm_insns flag is set -- presumably meaning we are past the
   parameter-setup insns; confirm against the caller that sets it -- then
   clear TREE_READONLY on the PARM_DECL, since the body modifies it.
   X (the stored value) is ignored.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy which is
   used to finish compiling the inline function itself.
If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */
  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
	 it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
	{
	  REAL_VALUE_TYPE d;

	  REAL_VALUE_FROM_CONST_DOUBLE (d, x);
	  return immed_real_const_1 (d, GET_MODE (x));
	}
      else
	return immed_double_const (CONST_DOUBLE_LOW (x), CONST_DOUBLE_HIGH (x),
				   VOIDmode);

    case CONST:
      /* An RTX_INTEGRATED_P CONST is a constant saved by save_constants;
	 get a constant pool entry for it back.  */
      if (RTX_INTEGRATED_P (x))
	return validize_mem (force_const_mem (GET_MODE (x),
					      copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access it in the (different) mode
	 recorded on the SUBREG wrapper.  */
      if (RTX_INTEGRATED_P (x))
	{
	  rtx new
	    = force_const_mem (GET_MODE (SUBREG_REG (x)),
			       copy_for_inline (XEXP (SUBREG_REG (x), 0)));
	  PUT_MODE (new, GET_MODE (x));
	  return validize_mem (new);
	}
      break;

    case ADDRESS:
      /* ADDRESS is only valid here as the save_constants marker; anything
	 else is an internal error.  Else get the constant pool address.  */
      if (! RTX_INTEGRATED_P (x))
	abort ();
      return XEXP (force_const_mem (GET_MODE (x),
				    copy_for_inline (XEXP (x, 0))), 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
	 then it contains multiple ASM_OPERANDS rtx's that share operand 3.
	 We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
	{
	  x = rtx_alloc (ASM_OPERANDS);
	  XSTR (x, 0) = XSTR (orig, 0);
	  XSTR (x, 1) = XSTR (orig, 1);
	  XINT (x, 2) = XINT (orig, 2);
	  XVEC (x, 3) = copy_asm_operands_vector;
	  XVEC (x, 4) = copy_asm_constraints_vector;
	  XSTR (x, 5) = XSTR (orig, 5);
	  XINT (x, 6) = XINT (orig, 6);
	  return x;
	}
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
	 or is a constant plus one of the special registers.

	 We do not allow sharing of addresses that are either a special
	 register or the sum of a constant and a special register because
	 it is possible for unshare_all_rtl to copy the address, into memory
	 that won't be saved.  Although the MEM can safely be shared, and
	 won't be copied there, the address itself cannot be shared, and may
	 need to be copied.

	 There are also two exceptions with constants: The first is if the
	 constant is a LABEL_REF or the sum of the LABEL_REF and an integer.
	 This case can happen if we have an inline function that supplies a
	 constant operand to the call of another inline function that uses
	 it in a switch statement.  In this case, we will be replacing the
	 LABEL_REF, so we have to replace this MEM as well.

	 The second case is if we have a (const (plus (address ..) ...)).
	 In that case we need to put back the address of the constant pool
	 entry.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  && GET_CODE (XEXP (x, 0)) != LABEL_REF
	  && ! (GET_CODE (XEXP (x, 0)) == CONST
		&& (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
		    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			 == LABEL_REF)
			|| (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
			    == ADDRESS)))))
	return x;
      break;

    case LABEL_REF:
      {
	/* Must point to the new (copied) insn, via label_map.  */
	return gen_rtx (LABEL_REF, GET_MODE (orig),
			label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      }

    case REG:
      /* Pseudo-regs above the virtual registers are remapped; hard regs
	 and virtual regs are shared as-is.  */
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
	return reg_map [REGNO (x)];
      else
	return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg, clear its
	 TREE_READONLY to prevent certain optimizations.  */
      {
	rtx dest = SET_DEST (x);

	/* Strip wrappers to find the register actually being stored.  */
	while (GET_CODE (dest) == STRICT_LOW_PART
	       || GET_CODE (dest) == ZERO_EXTRACT
	       || GET_CODE (dest) == SUBREG)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == REG
	    && REGNO (dest) < max_parm_reg
	    && REGNO (dest) >= FIRST_PSEUDO_REGISTER
	    && parmdecl_map[REGNO (dest)] != 0
	    /* The insn to load an arg pseudo from a stack slot
	       does not count as modifying it.  */
	    && in_nonparm_insns)
	  TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0 /* This is a good idea, but here is the wrong place for it.  */
      /* Arrange that CONST_INTs always appear as the second operand
	 if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
	 always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
	  || (XEXP (x, 1) == frame_pointer_rtx
	      || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		  && XEXP (x, 1) == arg_pointer_rtx)))
	{
	  rtx t = XEXP (x, 0);
	  XEXP (x, 0) = XEXP (x, 1);
	  XEXP (x, 1) = t;
	}
      break;
#endif
    }

  /* Replace this rtx with a shallow copy of itself (header plus operand
     slots; operands still point at the originals until the loop below).  */

  x = rtx_alloc (code);
  bcopy (orig, x, (sizeof (*x) - sizeof (x->fld)
		   + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.  We can store any replaced
     subexpressions directly into X since we know X is not shared!
     Any vectors in X must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  XEXP (x, i) = copy_for_inline (XEXP (x, i));
	  break;

	case 'u':
	  /* Change any references to old-insns to point to the
	     corresponding copied insns.  */
	  XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
	    {
	      register int j;

	      XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
	      for (j = 0; j < XVECLEN (x, i); j++)
		XVECEXP (x, i, j) = copy_for_inline (XVECEXP (x, i, j));
	    }
	  break;
	}
    }

  /* Remember the operand and constraint vectors of the first
     ASM_OPERANDS we copy, so that later ASM_OPERANDS sharing the
     originals (see the case above) share the copies too.  */
  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;

/* Nonzero if X is (plus (reg) (const_int)) where the register is one of
   the virtual registers, i.e. a constant offset from a fixed base.  */

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT	\
   && GET_CODE (XEXP (X, 0)) == REG				\
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER		\
   && REGNO (XEXP (X, 0)) < LAST_VIRTUAL_REGISTER)

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is
   convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.
*/rtxexpand_inline_function (fndecl, parms, target, ignore, type, structure_value_addr) tree fndecl, parms; rtx target; int ignore; tree type; rtx structure_value_addr;{ tree formal, actual, block; rtx header = DECL_SAVED_INSNS (fndecl); rtx insns = FIRST_FUNCTION_INSN (header); rtx parm_insns = FIRST_PARM_INSN (header); tree *arg_trees; rtx *arg_vals; rtx insn; int max_regno; register int i; int min_labelno = FIRST_LABELNO (header); int max_labelno = LAST_LABELNO (header); int nargs; rtx local_return_label = 0; rtx loc; rtx temp; struct inline_remap *map; rtx cc0_insn = 0; rtvec arg_vector = ORIGINAL_ARG_VECTOR (header); /* Allow for equivalences of the pseudos we make for virtual fp and ap. */ max_regno = MAX_REGNUM (header) + 3; if (max_regno < FIRST_PSEUDO_REGISTER) abort (); nargs = list_length (DECL_ARGUMENTS (fndecl)); /* We expect PARMS to have the right length; don't crash if not. */ if (list_length (parms) != nargs) return (rtx) (HOST_WIDE_INT) -1; /* Also check that the parms type match. Since the appropriate conversions or default promotions have already been applied, the machine modes should match exactly. */ for (formal = DECL_ARGUMENTS (fndecl), actual = parms; formal; formal = TREE_CHAIN (formal), actual = TREE_CHAIN (actual)) { tree arg = TREE_VALUE (actual); enum machine_mode mode = TYPE_MODE (DECL_ARG_TYPE (formal)); if (mode != TYPE_MODE (TREE_TYPE (arg))) return (rtx) (HOST_WIDE_INT) -1; /* If they are block mode, the types should match exactly. They don't match exactly if TREE_TYPE (FORMAL) == ERROR_MARK_NODE, which could happen if the parameter has incomplete type. */ if (mode == BLKmode && TREE_TYPE (arg) != TREE_TYPE (formal)) return (rtx) (HOST_WIDE_INT) -1;
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -