📄 alpha.c
      bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
              - floor_log2 (c) - 1
              - (HOST_BITS_PER_WIDE_INT < 64));
      if (bits > 0)
        for (; bits > 0; bits--)
          {
            new = c << bits;
            temp = alpha_emit_set_const (subtarget, mode, new, i, no_output);
            if (!temp)
              {
                new = (c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1);
                temp = alpha_emit_set_const (subtarget, mode, new,
                                             i, no_output);
              }
            if (temp)
              {
                if (no_output)
                  return temp;
                return expand_binop (mode, lshr_optab, temp, GEN_INT (bits),
                                     target, 1, OPTAB_WIDEN);
              }
          }

      /* Now try high-order 1 bits.  We get that with a sign-extension.
         But one bit isn't enough here.  Be careful to avoid shifting outside
         the mode and to avoid shifting outside the host wide int size.  */

      bits = (MIN (HOST_BITS_PER_WIDE_INT, GET_MODE_SIZE (mode) * 8)
              - floor_log2 (~ c) - 2);
      if (bits > 0)
        for (; bits > 0; bits--)
          {
            new = c << bits;
            temp = alpha_emit_set_const (subtarget, mode, new, i, no_output);
            if (!temp)
              {
                new = (c << bits) | (((HOST_WIDE_INT) 1 << bits) - 1);
                temp = alpha_emit_set_const (subtarget, mode, new,
                                             i, no_output);
              }
            if (temp)
              {
                if (no_output)
                  return temp;
                return expand_binop (mode, ashr_optab, temp, GEN_INT (bits),
                                     target, 0, OPTAB_WIDEN);
              }
          }
    }

#if HOST_BITS_PER_WIDE_INT == 64
  /* Finally, see if we can load a value into the target that is the same
     as the constant except that all bytes that are 0 are changed to be
     0xff.  If we can, then we can do a ZAPNOT to obtain the desired
     constant.  */

  new = c;
  for (i = 0; i < 64; i += 8)
    if ((new & ((HOST_WIDE_INT) 0xff << i)) == 0)
      new |= (HOST_WIDE_INT) 0xff << i;

  /* We are only called for SImode and DImode.  If this is SImode, ensure
     that we are sign extended to a full word.  */

  if (mode == SImode)
    new = ((new & 0xffffffff) ^ 0x80000000) - 0x80000000;

  if (new != c)
    {
      temp = alpha_emit_set_const (subtarget, mode, new, n - 1, no_output);
      if (temp)
        {
          if (no_output)
            return temp;
          return expand_binop (mode, and_optab, temp, GEN_INT (c | ~ new),
                               target, 0, OPTAB_WIDEN);
        }
    }
#endif

  return 0;
}
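/* [Editorial sketch, not part of the original alpha.c.]  A host-side
   illustration of the ZAPNOT trick used just above: filling every all-zero
   byte of C with 0xff yields a value that is often cheaper to load, and the
   mask (C | ~filled) is 0xff or 0x00 in each byte, so a single zapnot-style
   AND recovers C.  Plain C99, with uint64_t standing in for HOST_WIDE_INT;
   the function name is made up for this illustration.  */
#include <assert.h>
#include <stdint.h>

static void
zapnot_fill_sketch (uint64_t c)
{
  uint64_t filled = c;
  uint64_t mask;
  int i;

  /* Replace each zero byte of C with 0xff, mirroring the loop above.  */
  for (i = 0; i < 64; i += 8)
    if ((filled & ((uint64_t) 0xff << i)) == 0)
      filled |= (uint64_t) 0xff << i;

  /* The AND mask keeps the bytes C already had and clears only the bytes
     that were zero; every byte of the mask is either 0xff or 0x00, which is
     exactly what a ZAPNOT can apply.  */
  mask = c | ~filled;
  assert ((filled & mask) == c);
}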
/* Try to output insns to set TARGET equal to the constant C if it can be
   done in less than N insns.  Do all computations in MODE.  Returns the
   place where the output has been placed if it can be done and the insns
   have been emitted.  If it would take more than N insns, zero is returned
   and no insns are emitted.  */

static rtx
alpha_emit_set_const (rtx target, enum machine_mode mode,
                      HOST_WIDE_INT c, int n, bool no_output)
{
  enum machine_mode orig_mode = mode;
  rtx orig_target = target;
  rtx result = 0;
  int i;

  /* If we can't make any pseudos, TARGET is an SImode hard register, and we
     can't load this constant in one insn, do this in DImode.  */
  if (no_new_pseudos && mode == SImode
      && GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      result = alpha_emit_set_const_1 (target, mode, c, 1, no_output);
      if (result)
        return result;

      target = no_output ? NULL : gen_lowpart (DImode, target);
      mode = DImode;
    }
  else if (mode == V8QImode || mode == V4HImode || mode == V2SImode)
    {
      target = no_output ? NULL : gen_lowpart (DImode, target);
      mode = DImode;
    }

  /* Try 1 insn, then 2, then up to N.  */
  for (i = 1; i <= n; i++)
    {
      result = alpha_emit_set_const_1 (target, mode, c, i, no_output);
      if (result)
        {
          rtx insn, set;

          if (no_output)
            return result;

          insn = get_last_insn ();
          set = single_set (insn);
          if (! CONSTANT_P (SET_SRC (set)))
            set_unique_reg_note (get_last_insn (), REG_EQUAL, GEN_INT (c));
          break;
        }
    }

  /* Allow for the case where we changed the mode of TARGET.  */
  if (result)
    {
      if (result == target)
        result = orig_target;
      else if (mode != orig_mode)
        result = gen_lowpart (orig_mode, result);
    }

  return result;
}

/* Having failed to find a 3 insn sequence in alpha_emit_set_const,
   fall back to a straightforward decomposition.  We do this to avoid
   exponential run times encountered when looking for longer sequences
   with alpha_emit_set_const.  */

static rtx
alpha_emit_set_long_const (rtx target, HOST_WIDE_INT c1, HOST_WIDE_INT c2)
{
  HOST_WIDE_INT d1, d2, d3, d4;

  /* Decompose the entire word.  */
#if HOST_BITS_PER_WIDE_INT >= 64
  if (c2 != -(c1 < 0))
    abort ();
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  c1 = (c1 - d2) >> 32;
  d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d3;
  d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c1 != d4)
    abort ();
#else
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c1 != d2)
    abort ();
  c2 += (d2 < 0);
  d3 = ((c2 & 0xffff) ^ 0x8000) - 0x8000;
  c2 -= d3;
  d4 = ((c2 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  if (c2 != d4)
    abort ();
#endif

  /* Construct the high word.  */
  if (d4)
    {
      emit_move_insn (target, GEN_INT (d4));
      if (d3)
        emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d3)));
    }
  else
    emit_move_insn (target, GEN_INT (d3));

  /* Shift it into place.  */
  emit_move_insn (target, gen_rtx_ASHIFT (DImode, target, GEN_INT (32)));

  /* Add in the low bits.  */
  if (d2)
    emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d2)));
  if (d1)
    emit_move_insn (target, gen_rtx_PLUS (DImode, target, GEN_INT (d1)));

  return target;
}

/* Given an integral CONST_INT, CONST_DOUBLE, or CONST_VECTOR, return the
   low 64 bits.  */

static void
alpha_extract_integer (rtx x, HOST_WIDE_INT *p0, HOST_WIDE_INT *p1)
{
  HOST_WIDE_INT i0, i1;

  if (GET_CODE (x) == CONST_VECTOR)
    x = simplify_subreg (DImode, x, GET_MODE (x), 0);

  if (GET_CODE (x) == CONST_INT)
    {
      i0 = INTVAL (x);
      i1 = -(i0 < 0);
    }
  else if (HOST_BITS_PER_WIDE_INT >= 64)
    {
      i0 = CONST_DOUBLE_LOW (x);
      i1 = -(i0 < 0);
    }
  else
    {
      i0 = CONST_DOUBLE_LOW (x);
      i1 = CONST_DOUBLE_HIGH (x);
    }

  *p0 = i0;
  *p1 = i1;
}

/* Implement LEGITIMATE_CONSTANT_P.  This is all constants for which we
   are willing to load the value into a register via a move pattern.
   Normally this is all symbolic constants, integral constants that take
   three or fewer instructions, and floating-point zero.  */

bool
alpha_legitimate_constant_p (rtx x)
{
  enum machine_mode mode = GET_MODE (x);
  HOST_WIDE_INT i0, i1;

  switch (GET_CODE (x))
    {
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case HIGH:
      return true;

    case CONST_DOUBLE:
      if (x == CONST0_RTX (mode))
        return true;
      if (FLOAT_MODE_P (mode))
        return false;
      goto do_integer;

    case CONST_VECTOR:
      if (x == CONST0_RTX (mode))
        return true;
      if (GET_MODE_CLASS (mode) != MODE_VECTOR_INT)
        return false;
      if (GET_MODE_SIZE (mode) != 8)
        return false;
      goto do_integer;

    case CONST_INT:
    do_integer:
      if (TARGET_BUILD_CONSTANTS)
        return true;
      alpha_extract_integer (x, &i0, &i1);
      if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0))
        return alpha_emit_set_const_1 (x, mode, i0, 3, true) != NULL;
      return false;

    default:
      return false;
    }
}
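/* [Editorial sketch, not part of the original alpha.c.]  The decomposition
   in alpha_emit_set_long_const above relies on the idiom
   ((x & 0xffff) ^ 0x8000) - 0x8000, which sign-extends the low 16 bits of X,
   matching the signed 16-bit immediates of LDA/LDAH.  The emitted sequence
   rebuilds the constant as ((d4 + d3) << 32) + d2 + d1.  A host-side check
   of that invariant in plain C99, with int64_t standing in for
   HOST_WIDE_INT; the function name is made up, and constants near the ends
   of the 64-bit range, where the intermediate signed arithmetic would
   overflow, are outside the scope of this sketch.  */
#include <assert.h>
#include <stdint.h>

static void
long_const_sketch (int64_t c)
{
  int64_t c1 = c, d1, d2, d3, d4;

  /* Sign-extend and peel off the low 16 bits, then the remaining low 32
     bits; shift and repeat on the high half.  This mirrors the
     decomposition performed above.  */
  d1 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d1;
  d2 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  c1 = (c1 - d2) >> 32;
  d3 = ((c1 & 0xffff) ^ 0x8000) - 0x8000;
  c1 -= d3;
  d4 = ((c1 & 0xffffffff) ^ 0x80000000) - 0x80000000;
  assert (c1 == d4);

  /* The emitted insns build the high word (d4 + d3), shift it into place,
     then add in the low pieces d2 and d1.  For c = 0x12345678deadbeef the
     pieces are d1 = -0x4111, d2 = -0x21520000, d3 = 0x5679,
     d4 = 0x12340000.  */
  assert ((((d4 + d3) << 32) + d2 + d1) == c);
}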
/* Operand 1 is known to be a constant, and should require more than one
   instruction to load.  Emit that multi-part load.  */

bool
alpha_split_const_mov (enum machine_mode mode, rtx *operands)
{
  HOST_WIDE_INT i0, i1;
  rtx temp = NULL_RTX;

  alpha_extract_integer (operands[1], &i0, &i1);

  if (HOST_BITS_PER_WIDE_INT >= 64 || i1 == -(i0 < 0))
    temp = alpha_emit_set_const (operands[0], mode, i0, 3, false);

  if (!temp && TARGET_BUILD_CONSTANTS)
    temp = alpha_emit_set_long_const (operands[0], i0, i1);

  if (temp)
    {
      if (!rtx_equal_p (operands[0], temp))
        emit_move_insn (operands[0], temp);
      return true;
    }

  return false;
}

/* Expand a move instruction; return true if all work is done.
   We don't handle non-bwx subword loads here.  */

bool
alpha_expand_mov (enum machine_mode mode, rtx *operands)
{
  /* If the output is not a register, the input must be.  */
  if (GET_CODE (operands[0]) == MEM
      && ! reg_or_0_operand (operands[1], mode))
    operands[1] = force_reg (mode, operands[1]);

  /* Allow legitimize_address to perform some simplifications.  */
  if (mode == Pmode && symbolic_operand (operands[1], mode))
    {
      rtx tmp;

      tmp = alpha_legitimize_address (operands[1], operands[0], mode);
      if (tmp)
        {
          if (tmp == operands[0])
            return true;
          operands[1] = tmp;
          return false;
        }
    }

  /* Early out for non-constants and valid constants.  */
  if (! CONSTANT_P (operands[1]) || input_operand (operands[1], mode))
    return false;

  /* Split large integers.  */
  if (GET_CODE (operands[1]) == CONST_INT
      || GET_CODE (operands[1]) == CONST_DOUBLE
      || GET_CODE (operands[1]) == CONST_VECTOR)
    {
      if (alpha_split_const_mov (mode, operands))
        return true;
    }

  /* Otherwise we've nothing left but to drop the thing to memory.  */
  operands[1] = force_const_mem (mode, operands[1]);

  if (reload_in_progress)
    {
      emit_move_insn (operands[0], XEXP (operands[1], 0));
      operands[1] = copy_rtx (operands[1]);
      XEXP (operands[1], 0) = operands[0];
    }
  else
    operands[1] = validize_mem (operands[1]);

  return false;
}
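/* [Editorial sketch, not part of the original alpha.c.]  On a host whose
   HOST_WIDE_INT is only 32 bits wide, alpha_extract_integer hands back the
   constant as a low word I0 and a high word I1.  The guard
   "i1 == -(i0 < 0)" in alpha_split_const_mov and alpha_legitimate_constant_p
   accepts the pair only when the high word is exactly the sign extension of
   the low word, i.e. the whole value fits in one host word.  Plain C99
   illustration, with int32_t standing in for a 32-bit HOST_WIDE_INT and a
   made-up function name.  */
#include <stdint.h>

static int
fits_in_one_host_word (int32_t i0, int32_t i1)
{
  /* -(i0 < 0) is -1 when I0 is negative and 0 otherwise, which is what the
     high word must be when the 64-bit value is just I0 sign-extended.
     E.g. (i0 = -5, i1 = -1) is accepted; (i0 = -5, i1 = 0) is not.  */
  return i1 == -(i0 < 0);
}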
/* Expand a non-bwx QImode or HImode move instruction; return true if all
   work is done.  */

bool
alpha_expand_mov_nobwx (enum machine_mode mode, rtx *operands)
{
  /* If the output is not a register, the input must be.  */
  if (GET_CODE (operands[0]) == MEM)
    operands[1] = force_reg (mode, operands[1]);

  /* Handle four memory cases, unaligned and aligned for either the input
     or the output.  The only case where we can be called during reload is
     for aligned loads; all other cases require temporaries.  */
  if (GET_CODE (operands[1]) == MEM
      || (GET_CODE (operands[1]) == SUBREG
          && GET_CODE (SUBREG_REG (operands[1])) == MEM)
      || (reload_in_progress && GET_CODE (operands[1]) == REG
          && REGNO (operands[1]) >= FIRST_PSEUDO_REGISTER)
      || (reload_in_progress && GET_CODE (operands[1]) == SUBREG
          && GET_CODE (SUBREG_REG (operands[1])) == REG
          && REGNO (SUBREG_REG (operands[1])) >= FIRST_PSEUDO_REGISTER))
    {
      if (aligned_memory_operand (operands[1], mode))
        {
          if (reload_in_progress)
            {
              emit_insn ((mode == QImode
                          ? gen_reload_inqi_help
                          : gen_reload_inhi_help)
                         (operands[0], operands[1],
                          gen_rtx_REG (SImode, REGNO (operands[0]))));
            }
          else
            {
              rtx aligned_mem, bitnum;
              rtx scratch = gen_reg_rtx (SImode);
              rtx subtarget;
              bool copyout;

              get_aligned_mem (operands[1], &aligned_mem, &bitnum);

              subtarget = operands[0];
              if (GET_CODE (subtarget) == REG)
                subtarget = gen_lowpart (DImode, subtarget), copyout = false;
              else
                subtarget = gen_reg_rtx (DImode), copyout = true;

              emit_insn ((mode == QImode
                          ? gen_aligned_loadqi
                          : gen_aligned_loadhi)
                         (subtarget, aligned_mem, bitnum, scratch));

              if (copyout)
                emit_move_insn (operands[0], gen_lowpart (mode, subtarget));
            }
        }
      else
        {
          /* Don't pass these as parameters since that makes the generated
             code depend on parameter evaluation order which will cause
             bootstrap failures.  */
          rtx temp1, temp2, seq, subtarget;
          bool copyout;

          temp1 = gen_reg_rtx (DImode);
          temp2 = gen_reg_rtx (DImode);

          subtarget = operands[0];
          if (GET_CODE (subtarget) == REG)
            subtarget = gen_lowpart (DImode, subtarget), copyout = false;
          else
            subtarget = gen_reg_rtx (DImode), copyout = true;

          seq = ((mode == QImode
                  ? gen_unaligned_loadqi
                  : gen_unaligned_loadhi)
                 (subtarget, get_unaligned_address (operands[1], 0),
                  temp1, temp2));
          alpha_set_memflags (seq, operands[1]);
          emit_insn (seq);

          if (copyout)
            emit_move_insn (operands[0], gen_lowpart (mode, subtarget));
        }
      return true;
    }

  if (GET_CODE (operands[0]) == MEM
      || (GET_CODE (operands[0]) == SUBREG
          && GET_CODE (SUBREG_REG (operands[0])) == MEM)
      || (reload_in_progress && GET_CODE (operands[0]) == REG
          && REGNO (operands[0]) >= FIRST_PSEUDO_REGISTER)
      || (reload_in_progress && GET_CODE (operands[0]) == SUBREG
          && GET_CODE (SUB