/* sparc.c — excerpt from the GCC compiler source (SPARC back end).
   C source, 2,284 lines total; this chunk is page 1 of 5 from a web
   code viewer, so the first and last functions shown are truncated.  */
|| (GET_CODE (op) == CONST_INT && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode) && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x400) < 0x800));}/* Return true if OP is a register, or is a CONST_INT or CONST_DOUBLE that can fit in an 10 bit immediate field. This is an acceptable DImode operand for the movrcc instructions. *//* ??? Replace with arith10_operand? */intarith10_double_operand (op, mode) rtx op; enum machine_mode mode;{ return (register_operand (op, mode) || (GET_CODE (op) == CONST_DOUBLE && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode) && (unsigned) (CONST_DOUBLE_LOW (op) + 0x200) < 0x400 && ((CONST_DOUBLE_HIGH (op) == -1 && (CONST_DOUBLE_LOW (op) & 0x200) == 0x200) || (CONST_DOUBLE_HIGH (op) == 0 && (CONST_DOUBLE_LOW (op) & 0x200) == 0))) || (GET_CODE (op) == CONST_INT && (GET_MODE (op) == mode || GET_MODE (op) == VOIDmode) && (unsigned HOST_WIDE_INT) (INTVAL (op) + 0x200) < 0x400));}/* Return truth value of whether OP is a integer which fits the range constraining immediate operands in most three-address insns, which have a 13 bit immediate field. */intsmall_int (op, mode) rtx op; enum machine_mode mode;{ return (GET_CODE (op) == CONST_INT && SMALL_INT (op));}/* Recognize operand values for the umul instruction. That instruction sign extends immediate values just like all other sparc instructions, but interprets the extended result as an unsigned number. */intuns_small_int (op, mode) rtx op; enum machine_mode mode;{#if HOST_BITS_PER_WIDE_INT > 32 /* All allowed constants will fit a CONST_INT. 
*/ return (GET_CODE (op) == CONST_INT && ((INTVAL (op) >= 0 && INTVAL (op) < 0x1000) || (INTVAL (op) >= 0xFFFFF000 && INTVAL (op) < 0x100000000L)));#else return ((GET_CODE (op) == CONST_INT && (unsigned) INTVAL (op) < 0x1000) || (GET_CODE (op) == CONST_DOUBLE && CONST_DOUBLE_HIGH (op) == 0 && (unsigned) CONST_DOUBLE_LOW (op) - 0xFFFFF000 < 0x1000));#endif}intuns_arith_operand (op, mode) rtx op; enum machine_mode mode;{ return register_operand (op, mode) || uns_small_int (op, mode);}/* Return truth value of statement that OP is a call-clobbered register. */intclobbered_register (op, mode) rtx op; enum machine_mode mode;{ return (GET_CODE (op) == REG && call_used_regs[REGNO (op)]);}/* X and Y are two things to compare using CODE. Emit the compare insn and return the rtx for the cc reg in the proper mode. */rtxgen_compare_reg (code, x, y) enum rtx_code code; rtx x, y;{ enum machine_mode mode = SELECT_CC_MODE (code, x, y); rtx cc_reg; /* ??? We don't have movcc patterns so we cannot generate pseudo regs for the fcc regs (cse can't tell they're really call clobbered regs and will remove a duplicate comparison even if there is an intervening function call - it will then try to reload the cc reg via an int reg which is why we need the movcc patterns). It is possible to provide the movcc patterns by using the ldxfsr/stxfsr v9 insns. I tried it: you need two registers (say %g1,%g5) and it takes about 6 insns. A better fix would be to tell cse that CCFPE mode registers (even pseudos) are call clobbered. */ /* ??? This is an experiment. Rather than making changes to cse which may or may not be easy/clean, we do our own cse. This is possible because we will generate hard registers. Cse knows they're call clobbered (it doesn't know the same thing about pseudos). If we guess wrong, no big deal, but if we win, great! */ if (TARGET_V9 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)#if 1 /* experiment */ { int reg; /* We cycle through the registers to ensure they're all exercised. 
*/ static int next_fcc_reg = 0; /* Previous x,y for each fcc reg. */ static rtx prev_args[4][2]; /* Scan prev_args for x,y. */ for (reg = 0; reg < 4; reg++) if (prev_args[reg][0] == x && prev_args[reg][1] == y) break; if (reg == 4) { reg = next_fcc_reg; prev_args[reg][0] = x; prev_args[reg][1] = y; next_fcc_reg = (next_fcc_reg + 1) & 3; } cc_reg = gen_rtx (REG, mode, reg + SPARC_FIRST_V9_FCC_REG); }#else cc_reg = gen_reg_rtx (mode);#endif /* ! experiment */ else if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT) cc_reg = gen_rtx (REG, mode, SPARC_FCC_REG); else cc_reg = gen_rtx (REG, mode, SPARC_ICC_REG); emit_insn (gen_rtx (SET, VOIDmode, cc_reg, gen_rtx (COMPARE, mode, x, y))); return cc_reg;}/* This function is used for v9 only. CODE is the code for an Scc's comparison. OPERANDS[0] is the target of the Scc insn. OPERANDS[1] is the value we compare against const0_rtx (which hasn't been generated yet). This function is needed to turn (set (reg:SI 110) (gt (reg:CCX 100 %icc) (const_int 0))) into (set (reg:SI 110) (gt:DI (reg:CCX 100 %icc) (const_int 0))) IE: The instruction recognizer needs to see the mode of the comparison to find the right instruction. We could use "gt:DI" right in the define_expand, but leaving it out allows us to handle DI, SI, etc. We refer to the global sparc compare operands sparc_compare_op0 and sparc_compare_op1. */intgen_v9_scc (compare_code, operands) enum rtx_code compare_code; register rtx *operands;{ rtx temp, op0, op1; if (! TARGET_ARCH64 && (GET_MODE (sparc_compare_op0) == DImode || GET_MODE (operands[0]) == DImode)) return 0; /* Handle the case where operands[0] == sparc_compare_op0. We "early clobber" the result. */ if (REGNO (operands[0]) == REGNO (sparc_compare_op0)) { op0 = gen_reg_rtx (GET_MODE (sparc_compare_op0)); emit_move_insn (op0, sparc_compare_op0); } else op0 = sparc_compare_op0; /* For consistency in the following. */ op1 = sparc_compare_op1; /* Try to use the movrCC insns. 
*/ if (TARGET_ARCH64 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT && op1 == const0_rtx && v9_regcmp_p (compare_code)) { /* Special case for op0 != 0. This can be done with one instruction if operands[0] == sparc_compare_op0. We don't assume they are equal now though. */ if (compare_code == NE && GET_MODE (operands[0]) == DImode && GET_MODE (op0) == DImode) { emit_insn (gen_rtx (SET, VOIDmode, operands[0], op0)); emit_insn (gen_rtx (SET, VOIDmode, operands[0], gen_rtx (IF_THEN_ELSE, DImode, gen_rtx (compare_code, DImode, op0, const0_rtx), const1_rtx, operands[0]))); return 1; } emit_insn (gen_rtx (SET, VOIDmode, operands[0], const0_rtx)); if (GET_MODE (op0) != DImode) { temp = gen_reg_rtx (DImode); convert_move (temp, op0, 0); } else temp = op0; emit_insn (gen_rtx (SET, VOIDmode, operands[0], gen_rtx (IF_THEN_ELSE, GET_MODE (operands[0]), gen_rtx (compare_code, DImode, temp, const0_rtx), const1_rtx, operands[0]))); return 1; } else { operands[1] = gen_compare_reg (compare_code, op0, op1); switch (GET_MODE (operands[1])) { case CCmode : case CCXmode : case CCFPEmode : case CCFPmode : break; default : abort (); } emit_insn (gen_rtx (SET, VOIDmode, operands[0], const0_rtx)); emit_insn (gen_rtx (SET, VOIDmode, operands[0], gen_rtx (IF_THEN_ELSE, GET_MODE (operands[0]), gen_rtx (compare_code, GET_MODE (operands[1]), operands[1], const0_rtx), const1_rtx, operands[0]))); return 1; }}/* Emit a conditional jump insn for the v9 architecture using comparison code CODE and jump target LABEL. This function exists to take advantage of the v9 brxx insns. */voidemit_v9_brxx_insn (code, op0, label) enum rtx_code code; rtx op0, label;{ emit_jump_insn (gen_rtx (SET, VOIDmode, pc_rtx, gen_rtx (IF_THEN_ELSE, VOIDmode, gen_rtx (code, GET_MODE (op0), op0, const0_rtx), gen_rtx (LABEL_REF, VOIDmode, label), pc_rtx)));}/* Return nonzero if a return peephole merging return with setting of output register is ok. 
*/intleaf_return_peephole_ok (){ return (actual_fsize == 0);}/* Return nonzero if TRIAL can go into the function epilogue's delay slot. SLOT is the slot we are trying to fill. */inteligible_for_epilogue_delay (trial, slot) rtx trial; int slot;{ rtx pat, src; if (slot >= 1) return 0; if (GET_CODE (trial) != INSN || GET_CODE (PATTERN (trial)) != SET) return 0; if (get_attr_length (trial) != 1) return 0; /* If %g0 is live, there are lots of things we can't handle. Rather than trying to find them all now, let's punt and only optimize things as necessary. */ if (TARGET_LIVE_G0) return 0; /* In the case of a true leaf function, anything can go into the delay slot. A delay slot only exists however if the frame size is zero, otherwise we will put an insn to adjust the stack after the return. */ if (leaf_function) { if (leaf_return_peephole_ok ()) return ((get_attr_in_uncond_branch_delay (trial) == IN_BRANCH_DELAY_TRUE)); return 0; } /* If only trivial `restore' insns work, nothing can go in the delay slot. */ else if (TARGET_BROKEN_SAVERESTORE) return 0; pat = PATTERN (trial); /* Otherwise, only operations which can be done in tandem with a `restore' insn can go into the delay slot. */ if (GET_CODE (SET_DEST (pat)) != REG || REGNO (SET_DEST (pat)) >= 32 || REGNO (SET_DEST (pat)) < 24) return 0; /* The set of insns matched here must agree precisely with the set of patterns paired with a RETURN in sparc.md. */ src = SET_SRC (pat); /* This matches "*return_[qhs]". */ if (arith_operand (src, GET_MODE (src))) return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (SImode); /* This matches "*return_di". */ else if (arith_double_operand (src, GET_MODE (src))) return GET_MODE_SIZE (GET_MODE (src)) <= GET_MODE_SIZE (DImode); /* This matches "*return_sf_no_fpu". */ else if (! TARGET_FPU && restore_operand (SET_DEST (pat), SFmode) && register_operand (src, SFmode)) return 1; /* This matches "*return_addsi". 
*/ else if (GET_CODE (src) == PLUS && arith_operand (XEXP (src, 0), SImode) && arith_operand (XEXP (src, 1), SImode) && (register_operand (XEXP (src, 0), SImode) || register_operand (XEXP (src, 1), SImode))) return 1; /* This matches "*return_adddi". */ else if (GET_CODE (src) == PLUS && arith_double_operand (XEXP (src, 0), DImode) && arith_double_operand (XEXP (src, 1), DImode) && (register_operand (XEXP (src, 0), DImode) || register_operand (XEXP (src, 1), DImode))) return 1; /* This matches "*return_subsi". */ else if (GET_CODE (src) == MINUS && register_operand (XEXP (src, 0), SImode) && small_int (XEXP (src, 1), VOIDmode) && INTVAL (XEXP (src, 1)) != -4096) return 1; return 0;}intshort_branch (uid1, uid2) int uid1, uid2;{ unsigned int delta = insn_addresses[uid1] - insn_addresses[uid2]; if (delta + 1024 < 2048) return 1; /* warning ("long branch, distance %d", delta); */ return 0;}/* Return non-zero if REG is not used after INSN. We assume REG is a reload reg, and therefore does not live past labels or calls or jumps. */intreg_unused_after (reg, insn) rtx reg; rtx insn;{ enum rtx_code code, prev_code = UNKNOWN; while (insn = NEXT_INSN (insn)) { if (prev_code == CALL_INSN && call_used_regs[REGNO (reg)]) return 1; code = GET_CODE (insn); if (GET_CODE (insn) == CODE_LABEL) return 1; if (GET_RTX_CLASS (code) == 'i') { rtx set = single_set (insn); int in_src = set && reg_overlap_mentioned_p (reg, SET_SRC (set)); if (set && in_src) return 0; if (set && reg_overlap_mentioned_p (reg, SET_DEST (set))) return 1; if (set == 0 && reg_overlap_mentioned_p (reg, PATTERN (insn))) return 0; } prev_code = code; } return 1;}/* The table we use to reference PIC data. */static rtx global_offset_table;/* Ensure that we are not using patterns that are not OK with PIC. */intcheck_pic (i) int i;{ switch (flag_pic) { case 1: if (GET_CODE (recog_operand[i]) == SYMBOL_REF || (GET_CODE (recog_operand[i]) == CONST && ! 
(GET_CODE (XEXP (recog_operand[i], 0)) == MINUS && (XEXP (XEXP (recog_operand[i], 0), 0) == global_offset_table) && (GET_CODE (XEXP (XEXP (recog_operand[i], 0), 1)) == CONST)))) abort (); case 2: default: return 1; }}/* Return true if X is an address which needs a temporary register when reloaded while generating PIC code. */intpic_address_needs_scratch (x) rtx x;{ /* An address which is a symbolic plus a non SMALL_INT needs a temp reg. */ if (GET_CODE (x) == CONST && GET_CODE (XEXP (x, 0)) == PLUS && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
/* [Extraction artifact: the source continues beyond this point.  The lines
   that followed here were code-viewer UI chrome (keyboard-shortcut help:
   copy Ctrl+C, search Ctrl+F, fullscreen F11, font size Ctrl+= / Ctrl+-),
   not part of sparc.c.]  */