pa.c
/* Return truth value of whether OP can be used as an operand in a
   three operand arithmetic insn that accepts registers of mode MODE
   or 14-bit signed integers.  */
int
arith_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && INT_14_BITS (op)));
}

/* Return truth value of whether OP can be used as an operand in a
   three operand arithmetic insn that accepts registers of mode MODE
   or 11-bit signed integers.  */
int
arith11_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && INT_11_BITS (op)));
}

/* Return truth value of whether OP can be used as an operand in an
   adddi3 insn.  */
int
adddi3_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT
              && (TARGET_64BIT ? INT_14_BITS (op) : INT_11_BITS (op))));
}

/* A constant integer suitable for use in a PRE_MODIFY memory
   reference.  */
int
pre_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT
          && INTVAL (op) >= -0x2000 && INTVAL (op) < 0x10);
}

/* A constant integer suitable for use in a POST_MODIFY memory
   reference.  */
int
post_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT
          && INTVAL (op) < 0x2000 && INTVAL (op) >= -0x10);
}

/* Accept a register of mode MODE, or a CONST_DOUBLE of mode MODE
   usable as a 14-bit immediate in double-word arithmetic.  */
int
arith_double_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
          || (GET_CODE (op) == CONST_DOUBLE
              && GET_MODE (op) == mode
              && VAL_14_BITS_P (CONST_DOUBLE_LOW (op))
              && ((CONST_DOUBLE_HIGH (op) >= 0)
                  == ((CONST_DOUBLE_LOW (op) & 0x1000) == 0))));
}

/* Return truth value of whether OP is an integer which fits the
   range constraining immediate operands in three-address insns, or
   is an integer register.  */
int
ireg_or_int5_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return ((GET_CODE (op) == CONST_INT && INT_5_BITS (op))
          || (GET_CODE (op) == REG && REGNO (op) > 0 && REGNO (op) < 32));
}

/* Return nonzero if OP is an integer register, else return zero.  */
int
ireg_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == REG && REGNO (op) > 0 && REGNO (op) < 32);
}

/* Return truth value of whether OP is an integer which fits the
   range constraining immediate operands in three-address insns.  */
int
int5_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT && INT_5_BITS (op));
}

/* Likewise, but for unsigned 5-bit integers.  */
int
uint5_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT && INT_U5_BITS (op));
}

/* True iff OP is a CONST_INT that fits in 11 bits (signed).  */
int
int11_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT && INT_11_BITS (op));
}

/* True iff OP is a constant that fits in 32 bits when treated as
   unsigned.  */
int
uint32_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
#if HOST_BITS_PER_WIDE_INT > 32
  /* All allowed constants will fit a CONST_INT.  */
  return (GET_CODE (op) == CONST_INT
          && (INTVAL (op) >= 0 && INTVAL (op) < (HOST_WIDE_INT) 1 << 32));
#else
  return (GET_CODE (op) == CONST_INT
          || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0));
#endif
}

/* True iff OP is a register of mode MODE or a 5-bit signed
   CONST_INT.  */
int
arith5_operand (rtx op, enum machine_mode mode)
{
  return register_operand (op, mode) || int5_operand (op, mode);
}

/* True iff zdepi can be used to generate this CONST_INT.
   zdepi first sign extends a 5-bit signed number to a given field
   length, then places this field anywhere in a zero.  */
int
zdepi_cint_p (unsigned HOST_WIDE_INT x)
{
  unsigned HOST_WIDE_INT lsb_mask, t;

  /* This might not be obvious, but it's at least fast.  This function
     is critical; we don't have the time loops would take.  */
  lsb_mask = x & -x;
  t = ((x >> 4) + lsb_mask) & ~(lsb_mask - 1);
  /* Return true iff t is a power of two.  */
  return ((t & (t - 1)) == 0);
}
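/* Example of the check above: for x = 0xfff0, lsb_mask is 0x10 and
   t = (0x0fff + 0x10) & ~0xf = 0x1000, a power of two, so the
   constant is accepted (it is -1 sign-extended to a 12-bit field and
   deposited at bit 4).  For x = 0x210, t = 0x30 has two bits set, so
   the constant is rejected.  */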
/* True iff depi or extru can be used to compute (reg & mask).
   Accept bit patterns like these:
	0....01....1
	1....10....0
	1..10..01..1  */
int
and_mask_p (unsigned HOST_WIDE_INT mask)
{
  mask = ~mask;
  mask += mask & -mask;
  return (mask & (mask - 1)) == 0;
}

/* True iff depi or extru can be used to compute (reg & OP).  */
int
and_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
          || (GET_CODE (op) == CONST_INT && and_mask_p (INTVAL (op))));
}

/* True iff depi can be used to compute (reg | MASK).  */
int
ior_mask_p (unsigned HOST_WIDE_INT mask)
{
  mask += mask & -mask;
  return (mask & (mask - 1)) == 0;
}

/* True iff depi can be used to compute (reg | OP).  */
int
ior_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == CONST_INT && ior_mask_p (INTVAL (op)));
}
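/* Examples: and_mask_p accepts 0x3f (0....01....1) and -16
   (1....10....0), where the zero bits form a single contiguous block,
   but rejects 0xf0, whose zero bits form two blocks.  ior_mask_p
   accepts 0x7e0, a single contiguous block of ones, and rejects
   0x505.  */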
int
lhs_lshift_operand (rtx op, enum machine_mode mode)
{
  return register_operand (op, mode) || lhs_lshift_cint_operand (op, mode);
}

/* True iff OP is a CONST_INT of the forms 0...0xxxx or
   0...01...1xxxx.  Such values can be the left hand side x in
   (x << r), using the zvdepi instruction.  */
int
lhs_lshift_cint_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  unsigned HOST_WIDE_INT x;

  if (GET_CODE (op) != CONST_INT)
    return 0;
  x = INTVAL (op) >> 4;
  return (x & (x + 1)) == 0;
}

/* True iff OP is a register of mode MODE or any CONST_INT.  */
int
arith32_operand (rtx op, enum machine_mode mode)
{
  return register_operand (op, mode) || GET_CODE (op) == CONST_INT;
}

/* True iff OP is the program counter or a label reference.  */
int
pc_or_label_operand (rtx op, enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return (GET_CODE (op) == PC || GET_CODE (op) == LABEL_REF);
}

/* Legitimize PIC addresses.  If the address is already
   position-independent, we return ORIG.  Newly generated
   position-independent addresses go to REG.  If we need more than one
   register, we lose.  */
rtx
legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
{
  rtx pic_ref = orig;

  /* Labels need special handling.  */
  if (pic_label_operand (orig, mode))
    {
      /* We do not want to go through the movXX expanders here since that
         would create recursion.

         Nor do we really want to call a generator for a named pattern
         since that requires multiple patterns if we want to support
         multiple word sizes.

         So instead we just emit the raw set, which avoids the movXX
         expanders completely.  */
      mark_reg_pointer (reg, BITS_PER_UNIT);
      emit_insn (gen_rtx_SET (VOIDmode, reg, orig));
      current_function_uses_pic_offset_table = 1;
      return reg;
    }
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx insn, tmp_reg;

      if (reg == 0)
        abort ();

      /* Before reload, allocate a temporary register for the
         intermediate result.  This allows the sequence to be deleted
         when the final result is unused and the insns are trivially
         dead.  */
      tmp_reg = ((reload_in_progress || reload_completed)
                 ? reg : gen_reg_rtx (Pmode));

      emit_move_insn (tmp_reg,
                      gen_rtx_PLUS (word_mode, pic_offset_table_rtx,
                                    gen_rtx_HIGH (word_mode, orig)));
      pic_ref
        = gen_const_mem (Pmode,
                         gen_rtx_LO_SUM (Pmode, tmp_reg,
                                         gen_rtx_UNSPEC (Pmode,
                                                         gen_rtvec (1, orig),
                                                         UNSPEC_DLTIND14R)));

      current_function_uses_pic_offset_table = 1;
      mark_reg_pointer (reg, BITS_PER_UNIT);
      insn = emit_move_insn (reg, pic_ref);

      /* Put a REG_EQUAL note on this insn, so that it can be optimized.  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig, REG_NOTES (insn));

      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
          && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
        return orig;

      if (reg == 0)
        abort ();

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
        {
          base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
          orig = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
                                         base == reg ? 0 : reg);
        }
      else
        abort ();

      if (GET_CODE (orig) == CONST_INT)
        {
          if (INT_14_BITS (orig))
            return plus_constant (base, INTVAL (orig));
          orig = force_reg (Pmode, orig);
        }
      pic_ref = gen_rtx_PLUS (Pmode, base, orig);
      /* Likewise, should we set special REG_NOTEs here?  */
    }

  return pic_ref;
}

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was
   called.  In some cases it is useful to look at this to decide what
   needs to be done.

   MODE and WIN are passed so that this macro can use
   GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this macro to do nothing.  It exists to
   recognize opportunities to optimize the output.

   For the PA, transform:

	memory (X + <large int>)

   into:

	if (<large int> & mask) >= (mask + 1) / 2
	  Y = (<large int> & ~mask) + mask + 1	Round up.
	else
	  Y = (<large int> & ~mask)		Round down.
	Z = X + Y
	memory (Z + (<large int> - Y));

   This is for CSE to find several similar references, and only use
   one Z.

   X can either be a SYMBOL_REF or REG, but because combine cannot
   perform a 4->2 combination we do nothing for SYMBOL_REF + D where
   D will not fit in 14 bits.

   MODE_FLOAT references allow displacements which fit in 5 bits, so
   use 0x1f as the mask.

   MODE_INT references allow displacements which fit in 14 bits, so
   use 0x3fff as the mask.

   This relies on the fact that most mode MODE_FLOAT references will
   use FP registers and most mode MODE_INT references will use integer
   registers.  (In the rare case of an FP register used in an integer
   MODE, we depend on secondary reloads to clean things up.)

   It is also beneficial to handle (plus (mult (X) (Y)) (Z)) in a
   special manner if Y is 2, 4, or 8 (allows more shadd insns and
   shifted indexed addressing modes to be used).

   Put X and Z into registers.  Then put the entire expression into a
   register.  */
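/* For example, with the MODE_INT mask of 0x3fff an offset of 0x4123
   rounds down to Y = 0x4000, leaving a residual displacement of
   0x123, while 0x6123 rounds up to Y = 0x8000, leaving -0x1edd;
   either residual fits the signed 14-bit displacement field, and
   nearby references can share the register holding Z = X + Y.  */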
rtx
hppa_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
                         enum machine_mode mode)
{
  rtx orig = x;

  /* We need to canonicalize the order of operands in unscaled indexed
     addresses since the code that checks if an address is valid doesn't
     always try both orders.  */
  if (!TARGET_NO_SPACE_REGS
      && GET_CODE (x) == PLUS
      && GET_MODE (x) == Pmode
      && REG_P (XEXP (x, 0))
      && REG_P (XEXP (x, 1))
      && REG_POINTER (XEXP (x, 0))
      && !REG_POINTER (XEXP (x, 1)))
    return gen_rtx_PLUS (Pmode, XEXP (x, 1), XEXP (x, 0));

  if (flag_pic)
    return legitimize_pic_address (x, mode, gen_reg_rtx (Pmode));

  /* Strip off CONST.  */
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  /* Special case.  Get the SYMBOL_REF into a register and use indexing.
     That should always be safe.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
    {
      rtx reg = force_reg (Pmode, XEXP (x, 1));
      return force_reg (Pmode, gen_rtx_PLUS (Pmode, reg, XEXP (x, 0)));
    }

  /* Note we must reject symbols which represent function addresses
     since the assembler/linker can't handle arithmetic on plabels.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ((GET_CODE (XEXP (x, 0)) == SYMBOL_REF
           && !FUNCTION_NAME_P (XSTR (XEXP (x, 0), 0)))
          || GET_CODE (XEXP (x, 0)) == REG))
    {
      rtx int_part, ptr_reg;
      int newoffset;
      int offset = INTVAL (XEXP (x, 1));
      int mask;

      mask = (GET_MODE_CLASS (mode) == MODE_FLOAT
              ? (TARGET_PA_20 ? 0x3fff : 0x1f) : 0x3fff);

      /* Choose which way to round the offset.  Round up if we
         are >= halfway to the next boundary.  */
      if ((offset & mask) >= ((mask + 1) / 2))
        newoffset = (offset & ~ mask) + mask + 1;
      else
        newoffset = (offset & ~ mask);

      /* If the newoffset will not fit in 14 bits (ldo), then
         handling this would take 4 or 5 instructions (2 to load
         the SYMBOL_REF + 1 or 2 to load the newoffset + 1 to add
         the new offset and the SYMBOL_REF.)  Combine can not handle
         4->2 or 5->2 combinations, so do not create them.  */
      if (! VAL_14_BITS_P (newoffset)
          && GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
        {
          rtx const_part = plus_constant (XEXP (x, 0), newoffset);
          rtx tmp_reg
            = force_reg (Pmode,
                         gen_rtx_HIGH (Pmode, const_part));
          ptr_reg
            = force_reg (Pmode,
                         gen_rtx_LO_SUM (Pmode,
                                         tmp_reg, const_part));
        }
      else
        {
          if (! VAL_14_BITS_P (newoffset))
            int_part = force_reg (Pmode, GEN_INT (newoffset));
          else
            int_part = GEN_INT (newoffset);

          ptr_reg = force_reg (Pmode,
                               gen_rtx_PLUS (Pmode,
                                             force_reg (Pmode, XEXP (x, 0)),
                                             int_part));
        }
      return plus_constant (ptr_reg, offset - newoffset);
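The rounding arithmetic above is easy to sanity-check outside of GCC. The standalone sketch below is not part of pa.c; the helper name round_offset and the sample offsets are invented for illustration, and it assumes the 14-bit MODE_INT mask of 0x3fff.

#include <assert.h>
#include <stdio.h>

/* Re-derive Y and the residual displacement the same way the code
   above does for integer-mode references.  */
static long
round_offset (long offset, long mask)
{
  /* Round up if the masked part is >= halfway to the next boundary,
     otherwise round down.  */
  if ((offset & mask) >= (mask + 1) / 2)
    return (offset & ~mask) + mask + 1;
  return offset & ~mask;
}

int
main (void)
{
  const long mask = 0x3fff;	/* 14-bit displacement field.  */
  long offsets[] = { 0x123, 0x4123, 0x6123, 0x12345 };
  size_t i;

  for (i = 0; i < sizeof offsets / sizeof offsets[0]; i++)
    {
      long y = round_offset (offsets[i], mask);
      long residual = offsets[i] - y;

      /* The residual always fits the signed 14-bit field.  */
      assert (residual >= -0x2000 && residual < 0x2000);
      printf ("offset 0x%lx -> Y = 0x%lx, residual = %ld\n",
              (unsigned long) offsets[i], (unsigned long) y, residual);
    }
  return 0;
}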