alpha.c
    str++;
  return str;
}

#if TARGET_ABI_OPEN_VMS
static bool
alpha_linkage_symbol_p (symname)
     const char *symname;
{
  int symlen = strlen (symname);

  if (symlen > 4)
    return strcmp (&symname [symlen - 4], "..lk") == 0;

  return false;
}

#define LINKAGE_SYMBOL_REF_P(X) \
  ((GET_CODE (X) == SYMBOL_REF \
    && alpha_linkage_symbol_p (XSTR (X, 0))) \
   || (GET_CODE (X) == CONST \
       && GET_CODE (XEXP (X, 0)) == PLUS \
       && GET_CODE (XEXP (XEXP (X, 0), 0)) == SYMBOL_REF \
       && alpha_linkage_symbol_p (XSTR (XEXP (XEXP (X, 0), 0), 0))))
#endif

/* legitimate_address_p recognizes an RTL expression that is a valid
   memory address for an instruction.  The MODE argument is the
   machine mode for the MEM expression that wants to use this address.

   For Alpha, we have either a constant address or the sum of a
   register and a constant address, or just a register.  For DImode,
   any of those forms can be surrounded with an AND that clears the
   low-order three bits; this is an "unaligned" access.  */

bool
alpha_legitimate_address_p (mode, x, strict)
     enum machine_mode mode;
     rtx x;
     int strict;
{
  /* If this is an ldq_u type address, discard the outer AND.  */
  if (mode == DImode
      && GET_CODE (x) == AND
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && INTVAL (XEXP (x, 1)) == -8)
    x = XEXP (x, 0);

  /* Discard non-paradoxical subregs.  */
  if (GET_CODE (x) == SUBREG
      && (GET_MODE_SIZE (GET_MODE (x))
          < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
    x = SUBREG_REG (x);

  /* Unadorned general registers are valid.  */
  if (REG_P (x)
      && (strict
          ? STRICT_REG_OK_FOR_BASE_P (x)
          : NONSTRICT_REG_OK_FOR_BASE_P (x)))
    return true;

  /* Constant addresses (i.e. +/- 32k) are valid.  */
  if (CONSTANT_ADDRESS_P (x))
    return true;

#if TARGET_ABI_OPEN_VMS
  if (LINKAGE_SYMBOL_REF_P (x))
    return true;
#endif

  /* Register plus a small constant offset is valid.  */
  if (GET_CODE (x) == PLUS)
    {
      rtx ofs = XEXP (x, 1);
      x = XEXP (x, 0);

      /* Discard non-paradoxical subregs.  */
      if (GET_CODE (x) == SUBREG
          && (GET_MODE_SIZE (GET_MODE (x))
              < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
        x = SUBREG_REG (x);

      if (REG_P (x))
        {
          if (! strict
              && NONSTRICT_REG_OK_FP_BASE_P (x)
              && GET_CODE (ofs) == CONST_INT)
            return true;
          if ((strict
               ? STRICT_REG_OK_FOR_BASE_P (x)
               : NONSTRICT_REG_OK_FOR_BASE_P (x))
              && CONSTANT_ADDRESS_P (ofs))
            return true;
        }
      else if (GET_CODE (x) == ADDRESSOF
               && GET_CODE (ofs) == CONST_INT)
        return true;
    }

  /* If we're managing explicit relocations, LO_SUM is valid, as
     are small data symbols.  */
  else if (TARGET_EXPLICIT_RELOCS)
    {
      if (small_symbolic_operand (x, Pmode))
        return true;

      if (GET_CODE (x) == LO_SUM)
        {
          rtx ofs = XEXP (x, 1);
          x = XEXP (x, 0);

          /* Discard non-paradoxical subregs.  */
          if (GET_CODE (x) == SUBREG
              && (GET_MODE_SIZE (GET_MODE (x))
                  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
            x = SUBREG_REG (x);

          /* Must have a valid base register.  */
          if (! (REG_P (x)
                 && (strict
                     ? STRICT_REG_OK_FOR_BASE_P (x)
                     : NONSTRICT_REG_OK_FOR_BASE_P (x))))
            return false;

          /* The symbol must be local.  */
          if (local_symbolic_operand (ofs, Pmode)
              || dtp32_symbolic_operand (ofs, Pmode)
              || tp32_symbolic_operand (ofs, Pmode))
            return true;
        }
    }

  return false;
}
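/* Illustrative examples (not part of the original alpha.c) of address
   forms the predicate above accepts, written in RTL notation.  The
   register numbers, offsets, and symbol name are invented for the
   sketch:

       (reg:DI 30)                                  plain base register
       (plus:DI (reg:DI 30) (const_int 16))         base + small constant
       (and:DI (plus:DI (reg:DI 16) (const_int 11))
               (const_int -8))                      ldq_u-style unaligned
                                                    DImode access; the AND
                                                    with -8 clears the low
                                                    three address bits
       (lo_sum:DI (reg:DI 29)
                  (symbol_ref:DI "local_sym"))      explicit-relocation form,
                                                    valid only for local,
                                                    dtp32, or tp32 symbols  */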
/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.  */

rtx
alpha_legitimize_address (x, scratch, mode)
     rtx x;
     rtx scratch;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  HOST_WIDE_INT addend;

  /* If the address is (plus reg const_int) and the CONST_INT is not a
     valid offset, compute the high part of the constant and add it to
     the register.  Then our address is (plus temp low-part-const).  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ! CONSTANT_ADDRESS_P (XEXP (x, 1)))
    {
      addend = INTVAL (XEXP (x, 1));
      x = XEXP (x, 0);
      goto split_addend;
    }

  /* If the address is (const (plus FOO const_int)), find the low-order
     part of the CONST_INT.  Then load FOO plus any high-order part of the
     CONST_INT into a register.  Our address is (plus reg low-part-const).
     This is done to reduce the number of GOT entries.  */
  if (!no_new_pseudos
      && GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (x, 0), 1));
      x = force_reg (Pmode, XEXP (XEXP (x, 0), 0));
      goto split_addend;
    }

  /* If we have a (plus reg const), emit the load as in (2), then add
     the two registers, and finally generate (plus reg low-part-const) as
     our address.  */
  if (!no_new_pseudos
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (XEXP (x, 1), 0), 1));
      x = expand_simple_binop (Pmode, PLUS, XEXP (x, 0),
                               XEXP (XEXP (XEXP (x, 1), 0), 0),
                               NULL_RTX, 1, OPTAB_LIB_WIDEN);
      goto split_addend;
    }

  /* If this is a local symbol, split the address into HIGH/LO_SUM parts.  */
  if (TARGET_EXPLICIT_RELOCS && symbolic_operand (x, Pmode))
    {
      rtx r0, r16, eqv, tga, tp, insn, dest, seq;

      switch (tls_symbolic_operand_type (x))
        {
        case TLS_MODEL_GLOBAL_DYNAMIC:
          start_sequence ();

          r0 = gen_rtx_REG (Pmode, 0);
          r16 = gen_rtx_REG (Pmode, 16);
          tga = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_addr");
          dest = gen_reg_rtx (Pmode);
          seq = GEN_INT (alpha_next_sequence_number++);

          emit_insn (gen_movdi_er_tlsgd (r16, pic_offset_table_rtx, x, seq));
          insn = gen_call_value_osf_tlsgd (r0, tga, seq);
          insn = emit_call_insn (insn);
          CONST_OR_PURE_CALL_P (insn) = 1;
          use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);

          insn = get_insns ();
          end_sequence ();

          emit_libcall_block (insn, dest, r0, x);
          return dest;

        case TLS_MODEL_LOCAL_DYNAMIC:
          start_sequence ();

          r0 = gen_rtx_REG (Pmode, 0);
          r16 = gen_rtx_REG (Pmode, 16);
          tga = gen_rtx_SYMBOL_REF (Pmode, "__tls_get_addr");
          scratch = gen_reg_rtx (Pmode);
          seq = GEN_INT (alpha_next_sequence_number++);

          emit_insn (gen_movdi_er_tlsldm (r16, pic_offset_table_rtx, seq));
          insn = gen_call_value_osf_tlsldm (r0, tga, seq);
          insn = emit_call_insn (insn);
          CONST_OR_PURE_CALL_P (insn) = 1;
          use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);

          insn = get_insns ();
          end_sequence ();

          eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
                                UNSPEC_TLSLDM_CALL);
          emit_libcall_block (insn, scratch, r0, eqv);

          eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPREL);
          eqv = gen_rtx_CONST (Pmode, eqv);

          if (alpha_tls_size == 64)
            {
              dest = gen_reg_rtx (Pmode);
              emit_insn (gen_rtx_SET (VOIDmode, dest, eqv));
              emit_insn (gen_adddi3 (dest, dest, scratch));
              return dest;
            }
          if (alpha_tls_size == 32)
            {
              insn = gen_rtx_HIGH (Pmode, eqv);
              insn = gen_rtx_PLUS (Pmode, scratch, insn);
              scratch = gen_reg_rtx (Pmode);
              emit_insn (gen_rtx_SET (VOIDmode, scratch, insn));
            }
          return gen_rtx_LO_SUM (Pmode, scratch, eqv);

        case TLS_MODEL_INITIAL_EXEC:
          eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
          eqv = gen_rtx_CONST (Pmode, eqv);
          tp = gen_reg_rtx (Pmode);
          scratch = gen_reg_rtx (Pmode);
          dest = gen_reg_rtx (Pmode);

          emit_insn (gen_load_tp (tp));
          emit_insn (gen_rtx_SET (VOIDmode, scratch, eqv));
          emit_insn (gen_adddi3 (dest, tp, scratch));
          return dest;

        case TLS_MODEL_LOCAL_EXEC:
          eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
          eqv = gen_rtx_CONST (Pmode, eqv);
          tp = gen_reg_rtx (Pmode);

          emit_insn (gen_load_tp (tp));
          if (alpha_tls_size == 32)
            {
              insn = gen_rtx_HIGH (Pmode, eqv);
              insn = gen_rtx_PLUS (Pmode, tp, insn);
              tp = gen_reg_rtx (Pmode);
              emit_insn (gen_rtx_SET (VOIDmode, tp, insn));
            }
          return gen_rtx_LO_SUM (Pmode, tp, eqv);
        }

      if (local_symbolic_operand (x, Pmode))
        {
          if (small_symbolic_operand (x, Pmode))
            return x;
          else
            {
              if (!no_new_pseudos)
                scratch = gen_reg_rtx (Pmode);
              emit_insn (gen_rtx_SET (VOIDmode, scratch,
                                      gen_rtx_HIGH (Pmode, x)));
              return gen_rtx_LO_SUM (Pmode, scratch, x);
            }
        }
    }

  return NULL;

 split_addend:
  {
    HOST_WIDE_INT low, high;

    low = ((addend & 0xffff) ^ 0x8000) - 0x8000;
    addend -= low;
    high = ((addend & 0xffffffff) ^ 0x80000000) - 0x80000000;
    addend -= high;

    if (addend)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
                               (no_new_pseudos ? scratch : NULL_RTX),
                               1, OPTAB_LIB_WIDEN);
    if (high)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
                               (no_new_pseudos ? scratch : NULL_RTX),
                               1, OPTAB_LIB_WIDEN);

    return plus_constant (x, low);
  }
}
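/* A minimal standalone sketch (not part of the original alpha.c) of the
   high/low decomposition done at split_addend above.  The helper name is
   made up, and plain long long stands in for a 64-bit HOST_WIDE_INT; the
   whole block is wrapped in #if 0 so it does not affect the build.  */

#if 0  /* illustrative only */
static void
split_addend_sketch (long long addend, long long *plow, long long *phigh)
{
  /* Sign-extend the low 16 bits of ADDEND; e.g. 0x1234a678 -> -0x5988.  */
  long long low = ((addend & 0xffff) ^ 0x8000) - 0x8000;

  /* What remains after removing LOW is a multiple of 0x10000; sign-extend
     its low 32 bits to get the part a single ldah can supply.  For the
     example above this is 0x12350000, and 0x12350000 + (-0x5988)
     reconstructs 0x1234a678 exactly.  */
  long long high = (((addend - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

  /* Any part of ADDEND beyond 32 bits is not covered by LOW + HIGH; the
     caller above adds such a leftover with a separate instruction.  */
  *plow = low;
  *phigh = high;
}
#endif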
/* For TARGET_EXPLICIT_RELOCS, we don't obfuscate a SYMBOL_REF to a
   small symbolic operand until after reload, at which point we need
   to replace (mem (symbol_ref)) with (mem (lo_sum $29 symbol_ref))
   so that sched2 has the proper dependency information.  */

int
some_small_symbolic_operand (x, mode)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return for_each_rtx (&x, some_small_symbolic_operand_1, NULL);
}

static int
some_small_symbolic_operand_1 (px, data)
     rtx *px;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  return small_symbolic_operand (x, Pmode) != 0;
}

rtx
split_small_symbolic_operand (x)
     rtx x;
{
  x = copy_insn (x);
  for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
  return x;
}

static int
split_small_symbolic_operand_1 (px, data)
     rtx *px;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  if (small_symbolic_operand (x, Pmode))
    {
      x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
      *px = x;
      return -1;
    }

  return 0;
}

/* Try a machine-dependent way of reloading an illegitimate address
   operand.  If we find one, push the reload and return the new rtx.  */

rtx
alpha_legitimize_reload_address (x, mode, opnum, type, ind_levels)
     rtx x;
     enum machine_mode mode ATTRIBUTE_UNUSED;
     int opnum;
     int type;
     int ind_levels ATTRIBUTE_UNUSED;
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, type);
      return x;
    }

  /* We wish to handle large displacements off a base register by
     splitting the addend across an ldah and the mem insn.  This
     cuts the number of extra insns needed from 3 to 1.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      HOST_WIDE_INT high
        = (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
        return NULL_RTX;

      /* Reload the high part into a base reg; leave the low part
         in the mem directly.  */

      x = gen_rtx_PLUS (GET_MODE (x),
                        gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
                                      GEN_INT (high)),
                        GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
                   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
                   opnum, type);
      return x;
    }

  return NULL_RTX;
}
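/* Illustrative example (not part of the original alpha.c) of the
   displacement split performed above; the register number and constant
   are invented.  Reloading

       (plus:DI (reg:DI 9) (const_int 0x1234a678))

   produces

       (plus:DI (plus:DI (reg:DI 9) (const_int 0x12350000))
                (const_int -0x5988))

   where the inner PLUS is pushed as a reload (a single ldah can add the
   high part to the base register) and the sign-extended 16-bit low part
   stays in the mem's displacement field.  */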
/* REF is an alignable memory location.  Place an aligned SImode
   reference into *PALIGNED_MEM and the number of bits to shift into
   *PBITNUM.  SCRATCH is a free register for use in reloading out
   of range stack slots.  */

void
get_aligned_mem (ref, paligned_mem, pbitnum)
     rtx ref;
     rtx *paligned_mem, *pbitnum;
{
  rtx base;
  HOST_WIDE_INT offset = 0;

  if (GET_CODE (ref) != MEM)
    abort ();

  if (reload_in_progress
      && ! memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    {
      base = find_replacement (&XEXP (ref, 0));

      if (! memory_address_p (GET_MODE (ref), base))
        abort ();
    }
  else
    {
      base = XEXP (ref, 0);
    }

  if (GET_CODE (base) == PLUS)
    offset += INTVAL (XEXP (base, 1)), base = XEXP (base, 0);

  *paligned_mem = widen_memory_access (ref, SImode, (offset & ~3) - offset);

  if (WORDS_BIG_ENDIAN)
    *pbitnum = GEN_INT (32 - (GET_MODE_BITSIZE (GET_MODE (ref))
                              + (offset & 3) * 8));
  else
    *pbitnum = GEN_INT ((offset & 3) * 8);
}

/* Similar, but just get the address.  Handle the two reload cases.
   Add EXTRA_OFFSET to the address we return.  */

rtx
get_unaligned_address (ref, extra_offset)
     rtx ref;