📄 sparc.c
    {
      HOST_WIDE_INT value = INTVAL (op1);

      if (SPARC_SETHI_P (value & GET_MODE_MASK (mode))
          || SPARC_SIMM13_P (value))
        abort ();
    }

  /* Full 2-insn decomposition is needed.  */
  if (reload_in_progress || reload_completed)
    temp = op0;
  else
    temp = gen_reg_rtx (mode);

  if (GET_CODE (op1) == CONST_INT)
    {
      /* Emit them as real moves instead of a HIGH/LO_SUM,
         this way CSE can see everything and reuse intermediate
         values if it wants.  */
      if (TARGET_ARCH64
          && HOST_BITS_PER_WIDE_INT != 64
          && (INTVAL (op1) & 0x80000000) != 0)
        emit_insn (gen_rtx_SET (VOIDmode, temp,
                                immed_double_const (INTVAL (op1)
                                                    & ~(HOST_WIDE_INT)0x3ff,
                                                    0, DImode)));
      else
        emit_insn (gen_rtx_SET (VOIDmode, temp,
                                GEN_INT (INTVAL (op1)
                                         & ~(HOST_WIDE_INT)0x3ff)));

      emit_insn (gen_rtx_SET (VOIDmode, op0,
                              gen_rtx_IOR (mode, temp,
                                           GEN_INT (INTVAL (op1) & 0x3ff))));
    }
  else
    {
      /* A symbol, emit in the traditional way.  */
      emit_insn (gen_rtx_SET (VOIDmode, temp,
                              gen_rtx_HIGH (mode, op1)));
      emit_insn (gen_rtx_SET (VOIDmode, op0,
                              gen_rtx_LO_SUM (mode, temp, op1)));
    }
}

/* SPARC-v9 code-model support.  */

void
sparc_emit_set_symbolic_const64 (op0, op1, temp1)
     rtx op0;
     rtx op1;
     rtx temp1;
{
  rtx ti_temp1 = 0;

  if (temp1 && GET_MODE (temp1) == TImode)
    {
      ti_temp1 = temp1;
      temp1 = gen_rtx_REG (DImode, REGNO (temp1));
    }

  switch (sparc_cmodel)
    {
    case CM_MEDLOW:
      /* The range spanned by all instructions in the object is less
         than 2^31 bytes (2GB) and the distance from any instruction
         to the location of the label _GLOBAL_OFFSET_TABLE_ is less
         than 2^31 bytes (2GB).

         The executable must be in the low 4TB of the virtual address
         space.

         sethi  %hi(symbol), %temp
         or     %temp, %lo(symbol), %reg  */
      emit_insn (gen_rtx_SET (VOIDmode, temp1,
                              gen_rtx_HIGH (DImode, op1)));
      emit_insn (gen_rtx_SET (VOIDmode, op0,
                              gen_rtx_LO_SUM (DImode, temp1, op1)));
      break;

    case CM_MEDMID:
      /* The range spanned by all instructions in the object is less
         than 2^31 bytes (2GB) and the distance from any instruction
         to the location of the label _GLOBAL_OFFSET_TABLE_ is less
         than 2^31 bytes (2GB).

         The executable must be in the low 16TB of the virtual address
         space.

         sethi  %h44(symbol), %temp1
         or     %temp1, %m44(symbol), %temp2
         sllx   %temp2, 12, %temp3
         or     %temp3, %l44(symbol), %reg  */
      emit_insn (gen_seth44 (op0, op1));
      emit_insn (gen_setm44 (op0, op0, op1));
      emit_insn (gen_rtx_SET (VOIDmode, temp1,
                              gen_rtx_ASHIFT (DImode, op0, GEN_INT (12))));
      emit_insn (gen_setl44 (op0, temp1, op1));
      break;

    case CM_MEDANY:
      /* The range spanned by all instructions in the object is less
         than 2^31 bytes (2GB) and the distance from any instruction
         to the location of the label _GLOBAL_OFFSET_TABLE_ is less
         than 2^31 bytes (2GB).

         The executable can be placed anywhere in the virtual address
         space.

         sethi  %hh(symbol), %temp1
         sethi  %lm(symbol), %temp2
         or     %temp1, %hm(symbol), %temp3
         or     %temp2, %lo(symbol), %temp4
         sllx   %temp3, 32, %temp5
         or     %temp4, %temp5, %reg  */

      /* It is possible that one of the registers we got for operands[2]
         might coincide with that of operands[0] (which is why we made
         it TImode).  Pick the other one to use as our scratch.  */
      if (rtx_equal_p (temp1, op0))
        {
          if (ti_temp1)
            temp1 = gen_rtx_REG (DImode, REGNO (temp1) + 1);
          else
            abort();
        }

      emit_insn (gen_sethh (op0, op1));
      emit_insn (gen_setlm (temp1, op1));
      emit_insn (gen_sethm (op0, op0, op1));
      emit_insn (gen_rtx_SET (VOIDmode, op0,
                              gen_rtx_ASHIFT (DImode, op0, GEN_INT (32))));
      emit_insn (gen_rtx_SET (VOIDmode, op0,
                              gen_rtx_PLUS (DImode, op0, temp1)));
      emit_insn (gen_setlo (op0, op0, op1));
      break;

    case CM_EMBMEDANY:
      /* Old old old backwards compatibility kruft here.
         Essentially it is MEDLOW with a fixed 64-bit
         virtual base added to all data segment addresses.
         Text-segment stuff is computed like MEDANY, we can't
         reuse the code above because the relocation knobs
         look different.

         Data segment:  sethi  %hi(symbol), %temp1
                        or     %temp1, %lo(symbol), %temp2
                        add    %temp2, EMBMEDANY_BASE_REG, %reg

         Text segment:  sethi  %uhi(symbol), %temp1
                        sethi  %hi(symbol), %temp2
                        or     %temp1, %ulo(symbol), %temp3
                        or     %temp2, %lo(symbol), %temp4
                        sllx   %temp3, 32, %temp5
                        or     %temp4, %temp5, %reg  */
      if (data_segment_operand (op1, GET_MODE (op1)))
        {
          emit_insn (gen_embmedany_sethi (temp1, op1));
          emit_insn (gen_embmedany_brsum (op0, temp1));
          emit_insn (gen_embmedany_losum (op0, op0, op1));
        }
      else
        {
          /* It is possible that one of the registers we got for operands[2]
             might coincide with that of operands[0] (which is why we made
             it TImode).  Pick the other one to use as our scratch.  */
          if (rtx_equal_p (temp1, op0))
            {
              if (ti_temp1)
                temp1 = gen_rtx_REG (DImode, REGNO (temp1) + 1);
              else
                abort();
            }

          emit_insn (gen_embmedany_textuhi (op0, op1));
          emit_insn (gen_embmedany_texthi  (temp1, op1));
          emit_insn (gen_embmedany_textulo (op0, op0, op1));
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_ASHIFT (DImode, op0, GEN_INT (32))));
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_PLUS (DImode, op0, temp1)));
          emit_insn (gen_embmedany_textlo  (op0, op0, op1));
        }
      break;

    default:
      abort();
    }
}

/* These avoid problems when cross compiling.  If we do not
   go through all this hair then the optimizer will see
   invalid REG_EQUAL notes or in some cases none at all.  */
static void sparc_emit_set_safe_HIGH64 PARAMS ((rtx, HOST_WIDE_INT));
static rtx gen_safe_SET64 PARAMS ((rtx, HOST_WIDE_INT));
static rtx gen_safe_OR64 PARAMS ((rtx, HOST_WIDE_INT));
static rtx gen_safe_XOR64 PARAMS ((rtx, HOST_WIDE_INT));

#if HOST_BITS_PER_WIDE_INT == 64
#define GEN_HIGHINT64(__x)	GEN_INT ((__x) & ~(HOST_WIDE_INT)0x3ff)
#define GEN_INT64(__x)		GEN_INT (__x)
#else
#define GEN_HIGHINT64(__x) \
  immed_double_const ((__x) & ~(HOST_WIDE_INT)0x3ff, 0, DImode)
#define GEN_INT64(__x) \
  immed_double_const ((__x) & 0xffffffff, \
                      ((__x) & 0x80000000 ? -1 : 0), DImode)
#endif

/* The optimizer is not to assume anything about exactly
   which bits are set for a HIGH, they are unspecified.
   Unfortunately this leads to many missed optimizations
   during CSE.  We mask out the non-HIGH bits, and matches
   a plain movdi, to alleviate this problem.  */
static void
sparc_emit_set_safe_HIGH64 (dest, val)
     rtx dest;
     HOST_WIDE_INT val;
{
  emit_insn (gen_rtx_SET (VOIDmode, dest, GEN_HIGHINT64 (val)));
}

static rtx
gen_safe_SET64 (dest, val)
     rtx dest;
     HOST_WIDE_INT val;
{
  return gen_rtx_SET (VOIDmode, dest, GEN_INT64 (val));
}

static rtx
gen_safe_OR64 (src, val)
     rtx src;
     HOST_WIDE_INT val;
{
  return gen_rtx_IOR (DImode, src, GEN_INT64 (val));
}

static rtx
gen_safe_XOR64 (src, val)
     rtx src;
     HOST_WIDE_INT val;
{
  return gen_rtx_XOR (DImode, src, GEN_INT64 (val));
}
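/* Worked example (for illustration only): the ~0x3ff / 0x3ff masks used
   above mirror the sethi/or pairing.  sethi loads the upper 22 bits of a
   32-bit value and clears the low 10, so for a sample constant
   0x12345678:

	0x12345678 & ~(HOST_WIDE_INT)0x3ff  == 0x12345400   (the sethi piece)
	0x12345678 &  0x3ff                 == 0x00000278   (the or piece)

   and OR'ing the two pieces back together recovers 0x12345678, which is
   why GEN_HIGHINT64 followed by an IOR of the low 10 bits is enough to
   rebuild such a constant.  */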
/* Worker routines for 64-bit constant formation on arch64.
   One of the key things to be doing in these emissions is
   to create as many temp REGs as possible.  This makes it
   possible for half-built constants to be used later when
   such values are similar to something required later on.
   Without doing this, the optimizer cannot see such
   opportunities.  */

static void sparc_emit_set_const64_quick1
	PARAMS ((rtx, rtx, unsigned HOST_WIDE_INT, int));

static void
sparc_emit_set_const64_quick1 (op0, temp, low_bits, is_neg)
     rtx op0;
     rtx temp;
     unsigned HOST_WIDE_INT low_bits;
     int is_neg;
{
  unsigned HOST_WIDE_INT high_bits;

  if (is_neg)
    high_bits = (~low_bits) & 0xffffffff;
  else
    high_bits = low_bits;

  sparc_emit_set_safe_HIGH64 (temp, high_bits);
  if (!is_neg)
    {
      emit_insn (gen_rtx_SET (VOIDmode, op0,
                              gen_safe_OR64 (temp, (high_bits & 0x3ff))));
    }
  else
    {
      /* If we are XOR'ing with -1, then we should emit a one's complement
         instead.  This way the combiner will notice logical operations
         such as ANDN later on and substitute.  */
      if ((low_bits & 0x3ff) == 0x3ff)
        {
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_NOT (DImode, temp)));
        }
      else
        {
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_safe_XOR64 (temp,
                                                  (-(HOST_WIDE_INT)0x400
                                                   | (low_bits & 0x3ff)))));
        }
    }
}

static void sparc_emit_set_const64_quick2
	PARAMS ((rtx, rtx, unsigned HOST_WIDE_INT,
	         unsigned HOST_WIDE_INT, int));

static void
sparc_emit_set_const64_quick2 (op0, temp, high_bits, low_immediate, shift_count)
     rtx op0;
     rtx temp;
     unsigned HOST_WIDE_INT high_bits;
     unsigned HOST_WIDE_INT low_immediate;
     int shift_count;
{
  rtx temp2 = op0;

  if ((high_bits & 0xfffffc00) != 0)
    {
      sparc_emit_set_safe_HIGH64 (temp, high_bits);
      if ((high_bits & ~0xfffffc00) != 0)
        emit_insn (gen_rtx_SET (VOIDmode, op0,
                                gen_safe_OR64 (temp, (high_bits & 0x3ff))));
      else
        temp2 = temp;
    }
  else
    {
      emit_insn (gen_safe_SET64 (temp, high_bits));
      temp2 = temp;
    }

  /* Now shift it up into place.  */
  emit_insn (gen_rtx_SET (VOIDmode, op0,
                          gen_rtx_ASHIFT (DImode, temp2,
                                          GEN_INT (shift_count))));

  /* If there is a low immediate part piece, finish up by
     putting that in as well.  */
  if (low_immediate != 0)
    emit_insn (gen_rtx_SET (VOIDmode, op0,
                            gen_safe_OR64 (op0, low_immediate)));
}
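/* For illustration, consider one split a caller might choose:
   high_bits = 0x12345678, low_immediate = 0xabc, shift_count = 32.
   The routine above then emits roughly:

	sethi	%hi(0x12345678), %temp	! temp = 0x12345400
	or	%temp, 0x278, %op0	! op0  = 0x12345678
	sllx	%op0, 32, %op0		! op0  = 0x1234567800000000
	or	%op0, 0xabc, %op0	! op0  = 0x1234567800000abc  */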
static void sparc_emit_set_const64_longway
	PARAMS ((rtx, rtx, unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT));

/* Full 64-bit constant decomposition.  Even though this is the
   'worst' case, we still optimize a few things away.  */
static void
sparc_emit_set_const64_longway (op0, temp, high_bits, low_bits)
     rtx op0;
     rtx temp;
     unsigned HOST_WIDE_INT high_bits;
     unsigned HOST_WIDE_INT low_bits;
{
  rtx sub_temp;

  if (reload_in_progress || reload_completed)
    sub_temp = op0;
  else
    sub_temp = gen_reg_rtx (DImode);

  if ((high_bits & 0xfffffc00) != 0)
    {
      sparc_emit_set_safe_HIGH64 (temp, high_bits);
      if ((high_bits & ~0xfffffc00) != 0)
        emit_insn (gen_rtx_SET (VOIDmode, sub_temp,
                                gen_safe_OR64 (temp, (high_bits & 0x3ff))));
      else
        sub_temp = temp;
    }
  else
    {
      emit_insn (gen_safe_SET64 (temp, high_bits));
      sub_temp = temp;
    }

  if (!reload_in_progress && !reload_completed)
    {
      rtx temp2 = gen_reg_rtx (DImode);
      rtx temp3 = gen_reg_rtx (DImode);
      rtx temp4 = gen_reg_rtx (DImode);

      emit_insn (gen_rtx_SET (VOIDmode, temp4,
                              gen_rtx_ASHIFT (DImode, sub_temp,
                                              GEN_INT (32))));

      sparc_emit_set_safe_HIGH64 (temp2, low_bits);
      if ((low_bits & ~0xfffffc00) != 0)
        {
          emit_insn (gen_rtx_SET (VOIDmode, temp3,
                                  gen_safe_OR64 (temp2, (low_bits & 0x3ff))));
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_PLUS (DImode, temp4, temp3)));
        }
      else
        {
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_PLUS (DImode, temp4, temp2)));
        }
    }
  else
    {
      rtx low1 = GEN_INT ((low_bits >> (32 - 12)) & 0xfff);
      rtx low2 = GEN_INT ((low_bits >> (32 - 12 - 12)) & 0xfff);
      rtx low3 = GEN_INT ((low_bits >> (32 - 12 - 12 - 8)) & 0x0ff);
      int to_shift = 12;

      /* We are in the middle of reload, so this is really
         painful.  However we do still make an attempt to
         avoid emitting truly stupid code.  */
      if (low1 != const0_rtx)
        {
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_ASHIFT (DImode, sub_temp,
                                                  GEN_INT (to_shift))));
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_IOR (DImode, op0, low1)));
          sub_temp = op0;
          to_shift = 12;
        }
      else
        {
          to_shift += 12;
        }
      if (low2 != const0_rtx)
        {
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_ASHIFT (DImode, sub_temp,
                                                  GEN_INT (to_shift))));
          emit_insn (gen_rtx_SET (VOIDmode, op0,
                                  gen_rtx_IOR (DImode, op0, low2)));
          sub_temp = op0;
          to_shift = 8;
        }
      else
        {
          to_shift += 8;
        }
      emit_insn (gen_rtx_SET (VOIDmode, op0,
                              gen_rtx_ASHIFT (DImode, sub_temp,
                                              GEN_INT (to_shift))));
      if (low3 != const0_rtx)
        emit_insn (gen_rtx_SET (VOIDmode, op0,
                                gen_rtx_IOR (DImode, op0, low3)));
      /* phew...  */
    }
}

/* Analyze a 64-bit constant for certain properties.  */
static void analyze_64bit_constant
	PARAMS ((unsigned HOST_WIDE_INT, unsigned HOST_WIDE_INT,
	         int *, int *, int *));

static void
analyze_64bit_constant (high_bits, low_bits, hbsp, lbsp, abbasp)
     unsigned HOST_WIDE_INT high_bits, low_bits;
     int *hbsp, *lbsp, *abbasp;
{
  int lowest_bit_set, highest_bit_set, all_bits_between_are_set;
  int i;

  lowest_bit_set = highest_bit_set = -1;
  i = 0;
  do
    {
      if ((lowest_bit_set == -1)
          && ((low_bits >> i) & 1))
        lowest_bit_set = i;
      if ((highest_bit_set == -1)