integrate.c
                save_constants (&REG_NOTES (insn));
            }

          /* Record what interesting things happen to our parameters.  */
          note_stores (PATTERN (insn), note_modified_parmregs);
        }
    }

  /* We have now allocated all that needs to be allocated permanently
     on the rtx obstack.  Set our high-water mark, so that we can
     free the rest of this when the time comes.  */

  preserve_data ();

  finish_inline (fndecl, head);
}

/* Given PX, a pointer into an insn, search for references to the constant
   pool.  Replace each with a CONST that has the mode of the original
   constant, contains the constant, and has RTX_INTEGRATED_P set.
   Similarly, constant pool addresses not enclosed in a MEM are replaced
   with an ADDRESS rtx which also gives the constant, mode, and has
   RTX_INTEGRATED_P set.  */

static void
save_constants (px)
     rtx *px;
{
  rtx x;
  int i, j;

 again:
  x = *px;

  /* If this is a CONST_DOUBLE, don't try to fix things up in
     CONST_DOUBLE_MEM, because this is an infinite recursion.  */
  if (GET_CODE (x) == CONST_DOUBLE)
    return;
  else if (GET_CODE (x) == MEM && GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
    {
      enum machine_mode const_mode = get_pool_mode (XEXP (x, 0));
      rtx new = gen_rtx (CONST, const_mode,
                         get_pool_constant (XEXP (x, 0)));
      RTX_INTEGRATED_P (new) = 1;

      /* If the MEM was in a different mode than the constant (perhaps we
         were only looking at the low-order part), surround it with a
         SUBREG so we can save both modes.  */
      if (GET_MODE (x) != const_mode)
        {
          new = gen_rtx (SUBREG, GET_MODE (x), new, 0);
          RTX_INTEGRATED_P (new) = 1;
        }

      *px = new;
      save_constants (&XEXP (*px, 0));
    }
  else if (GET_CODE (x) == SYMBOL_REF
           && CONSTANT_POOL_ADDRESS_P (x))
    {
      *px = gen_rtx (ADDRESS, get_pool_mode (x), get_pool_constant (x));
      save_constants (&XEXP (*px, 0));
      RTX_INTEGRATED_P (*px) = 1;
    }
  else
    {
      char *fmt = GET_RTX_FORMAT (GET_CODE (x));
      int len = GET_RTX_LENGTH (GET_CODE (x));

      for (i = len - 1; i >= 0; i--)
        {
          switch (fmt[i])
            {
            case 'E':
              for (j = 0; j < XVECLEN (x, i); j++)
                save_constants (&XVECEXP (x, i, j));
              break;

            case 'e':
              if (XEXP (x, i) == 0)
                continue;
              if (i == 0)
                {
                  /* Hack tail-recursion here.  */
                  px = &XEXP (x, 0);
                  goto again;
                }
              save_constants (&XEXP (x, i));
              break;
            }
        }
    }
}

/* Note whether a parameter is modified or not.  */

static void
note_modified_parmregs (reg, x)
     rtx reg;
     rtx x;
{
  if (GET_CODE (reg) == REG && in_nonparm_insns
      && REGNO (reg) < max_parm_reg
      && REGNO (reg) >= FIRST_PSEUDO_REGISTER
      && parmdecl_map[REGNO (reg)] != 0)
    TREE_READONLY (parmdecl_map[REGNO (reg)]) = 0;
}

/* Copy the rtx ORIG recursively, replacing pseudo-regs and labels
   according to `reg_map' and `label_map'.  The original rtl insns
   will be saved for inlining; this is used to make a copy
   which is used to finish compiling the inline function itself.

   If we find a "saved" constant pool entry, one which was replaced with
   the value of the constant, convert it back to a constant pool entry.
   Since the pool wasn't touched, this should simply restore the old
   address.

   All other kinds of rtx are copied except those that can never be
   changed during compilation.  */

static rtx
copy_for_inline (orig)
     rtx orig;
{
  register rtx x = orig;
  register int i;
  register enum rtx_code code;
  register char *format_ptr;

  if (x == 0)
    return x;

  code = GET_CODE (x);

  /* These types may be freely shared.  */
  switch (code)
    {
    case QUEUED:
    case CONST_INT:
    case SYMBOL_REF:
    case PC:
    case CC0:
      return x;

    case CONST_DOUBLE:
      /* We have to make a new CONST_DOUBLE to ensure that we account for
         it correctly.  Using the old CONST_DOUBLE_MEM data is wrong.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        {
          REAL_VALUE_TYPE d;

          REAL_VALUE_FROM_CONST_DOUBLE (d, x);
          return CONST_DOUBLE_FROM_REAL_VALUE (d, GET_MODE (x));
        }
      else
        return immed_double_const (CONST_DOUBLE_LOW (x),
                                   CONST_DOUBLE_HIGH (x), VOIDmode);

    case CONST:
      /* Get constant pool entry for constant in the pool.  */
      if (RTX_INTEGRATED_P (x))
        return validize_mem (force_const_mem (GET_MODE (x),
                                              copy_for_inline (XEXP (x, 0))));
      break;

    case SUBREG:
      /* Get constant pool entry, but access in different mode.  */
      if (RTX_INTEGRATED_P (x))
        {
          rtx new
            = force_const_mem (GET_MODE (SUBREG_REG (x)),
                               copy_for_inline (XEXP (SUBREG_REG (x), 0)));

          PUT_MODE (new, GET_MODE (x));
          return validize_mem (new);
        }
      break;

    case ADDRESS:
      /* If not special for constant pool error.  Else get constant pool
         address.  */
      if (! RTX_INTEGRATED_P (x))
        abort ();

      return XEXP (force_const_mem (GET_MODE (x),
                                    copy_for_inline (XEXP (x, 0))), 0);

    case ASM_OPERANDS:
      /* If a single asm insn contains multiple output operands
         then it contains multiple ASM_OPERANDS rtx's that share operand 3.
         We must make sure that the copied insn continues to share it.  */
      if (orig_asm_operands_vector == XVEC (orig, 3))
        {
          x = rtx_alloc (ASM_OPERANDS);
          x->volatil = orig->volatil;
          XSTR (x, 0) = XSTR (orig, 0);
          XSTR (x, 1) = XSTR (orig, 1);
          XINT (x, 2) = XINT (orig, 2);
          XVEC (x, 3) = copy_asm_operands_vector;
          XVEC (x, 4) = copy_asm_constraints_vector;
          XSTR (x, 5) = XSTR (orig, 5);
          XINT (x, 6) = XINT (orig, 6);
          return x;
        }
      break;

    case MEM:
      /* A MEM is usually allowed to be shared if its address is constant
         or is a constant plus one of the special registers.

         We do not allow sharing of addresses that are either a special
         register or the sum of a constant and a special register because
         it is possible for unshare_all_rtl to copy the address, into memory
         that won't be saved.  Although the MEM can safely be shared, and
         won't be copied there, the address itself cannot be shared, and may
         need to be copied.

         There are also two exceptions with constants: The first is if the
         constant is a LABEL_REF or the sum of the LABEL_REF
         and an integer.  This case can happen if we have an inline
         function that supplies a constant operand to the call of another
         inline function that uses it in a switch statement.  In this case,
         we will be replacing the LABEL_REF, so we have to replace this
         MEM as well.

         The second case is if we have a (const (plus (address ..) ...)).
         In that case we need to put back the address of the constant pool
         entry.  */

      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          && GET_CODE (XEXP (x, 0)) != LABEL_REF
          && ! (GET_CODE (XEXP (x, 0)) == CONST
                && (GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
                    && ((GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                         == LABEL_REF)
                        || (GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0))
                            == ADDRESS)))))
        return x;
      break;

    case LABEL_REF:
      /* If this is a non-local label, just make a new LABEL_REF.
         Otherwise, use the new label as well.  */
      x = gen_rtx (LABEL_REF, GET_MODE (orig),
                   LABEL_REF_NONLOCAL_P (orig) ? XEXP (orig, 0)
                   : label_map[CODE_LABEL_NUMBER (XEXP (orig, 0))]);
      LABEL_REF_NONLOCAL_P (x) = LABEL_REF_NONLOCAL_P (orig);
      LABEL_OUTSIDE_LOOP_P (x) = LABEL_OUTSIDE_LOOP_P (orig);
      return x;

    case REG:
      if (REGNO (x) > LAST_VIRTUAL_REGISTER)
        return reg_map[REGNO (x)];
      else
        return x;

    case SET:
      /* If a parm that gets modified lives in a pseudo-reg, clear its
         TREE_READONLY to prevent certain optimizations.  */
      {
        rtx dest = SET_DEST (x);

        while (GET_CODE (dest) == STRICT_LOW_PART
               || GET_CODE (dest) == ZERO_EXTRACT
               || GET_CODE (dest) == SUBREG)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == REG
            && REGNO (dest) < max_parm_reg
            && REGNO (dest) >= FIRST_PSEUDO_REGISTER
            && parmdecl_map[REGNO (dest)] != 0
            /* The insn to load an arg pseudo from a stack slot
               does not count as modifying it.  */
            && in_nonparm_insns)
          TREE_READONLY (parmdecl_map[REGNO (dest)]) = 0;
      }
      break;

#if 0
      /* This is a good idea, but here is the wrong place for it.  */

      /* Arrange that CONST_INTs always appear as the second operand
         if they appear, and that `frame_pointer_rtx' or `arg_pointer_rtx'
         always appear as the first.  */
    case PLUS:
      if (GET_CODE (XEXP (x, 0)) == CONST_INT
          || (XEXP (x, 1) == frame_pointer_rtx
              || (ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                  && XEXP (x, 1) == arg_pointer_rtx)))
        {
          rtx t = XEXP (x, 0);
          XEXP (x, 0) = XEXP (x, 1);
          XEXP (x, 1) = t;
        }
      break;
#endif
    }

  /* Replace this rtx with a copy of itself.  */

  x = rtx_alloc (code);
  bcopy ((char *) orig, (char *) x,
         (sizeof (*x) - sizeof (x->fld)
          + sizeof (x->fld[0]) * GET_RTX_LENGTH (code)));

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          XEXP (x, i) = copy_for_inline (XEXP (x, i));
          break;

        case 'u':
          /* Change any references to old-insns to point to the
             corresponding copied insns.  */
          XEXP (x, i) = insn_map[INSN_UID (XEXP (x, i))];
          break;

        case 'E':
          if (XVEC (x, i) != NULL && XVECLEN (x, i) != 0)
            {
              register int j;

              XVEC (x, i) = gen_rtvec_v (XVECLEN (x, i), &XVECEXP (x, i, 0));
              for (j = 0; j < XVECLEN (x, i); j++)
                XVECEXP (x, i, j) = copy_for_inline (XVECEXP (x, i, j));
            }
          break;
        }
    }

  if (code == ASM_OPERANDS && orig_asm_operands_vector == 0)
    {
      orig_asm_operands_vector = XVEC (orig, 3);
      copy_asm_operands_vector = XVEC (x, 3);
      copy_asm_constraints_vector = XVEC (x, 4);
    }

  return x;
}

/* Unfortunately, we need a global copy of const_equiv map for communication
   with a function called from note_stores.  Be *very* careful that this
   is used properly in the presence of recursion.  */

rtx *global_const_equiv_map;
int global_const_equiv_map_size;

#define FIXED_BASE_PLUS_P(X) \
  (GET_CODE (X) == PLUS && GET_CODE (XEXP (X, 1)) == CONST_INT \
   && GET_CODE (XEXP (X, 0)) == REG \
   && REGNO (XEXP (X, 0)) >= FIRST_VIRTUAL_REGISTER \
   && REGNO (XEXP (X, 0)) <= LAST_VIRTUAL_REGISTER)

/* Integrate the procedure defined by FNDECL.  Note that this function
   may wind up calling itself.  Since the static variables are not
   reentrant, we do not assign them until after the possibility
   of recursion is eliminated.

   If IGNORE is nonzero, do not produce a value.
   Otherwise store the value in TARGET if it is nonzero and that is
   convenient.

   Value is:
   (rtx)-1 if we could not substitute the function
   0 if we substituted it and it does not produce a value
   else an rtx for where the value is stored.  */

rtx
expand_inline_function (fndecl, parms, target, ignore, type,
                        structure_value_addr)
     tree fndecl, parms;
     rtx target;
     int ignore;
     tree type;
     rtx structure_value_addr;
{
  tree formal, actual, block;