📄 alpha.c
字号:
{
      /* NOTE(review): this fragment continues a legitimate-address
	 predicate whose head precedes this chunk; brace depth here
	 matches that unseen context.  */
      /* Before reload has committed hard registers, accept an FP-based
	 address with any CONST_INT offset; reload will fix it up.  */
      if (! strict
	  && NONSTRICT_REG_OK_FP_BASE_P (x)
	  && GET_CODE (ofs) == CONST_INT)
	return true;
      /* Otherwise require a valid base register (strictness as asked
	 by the caller) plus a constant address offset.  */
      if ((strict
	   ? STRICT_REG_OK_FOR_BASE_P (x)
	   : NONSTRICT_REG_OK_FOR_BASE_P (x))
	  && CONSTANT_ADDRESS_P (ofs))
	return true;
    }
    }

  /* If we're managing explicit relocations, LO_SUM is valid, as are small
     data symbols.  */
  else if (TARGET_EXPLICIT_RELOCS)
    {
      if (small_symbolic_operand (x, Pmode))
	return true;

      if (GET_CODE (x) == LO_SUM)
	{
	  rtx ofs = XEXP (x, 1);
	  x = XEXP (x, 0);

	  /* Discard non-paradoxical subregs.  */
	  if (GET_CODE (x) == SUBREG
	      && (GET_MODE_SIZE (GET_MODE (x))
		  < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
	    x = SUBREG_REG (x);

	  /* Must have a valid base register.  */
	  if (! (REG_P (x)
		 && (strict
		     ? STRICT_REG_OK_FOR_BASE_P (x)
		     : NONSTRICT_REG_OK_FOR_BASE_P (x))))
	    return false;

	  /* The symbol must be local.  */
	  if (local_symbolic_operand (ofs, Pmode)
	      || dtp32_symbolic_operand (ofs, Pmode)
	      || tp32_symbolic_operand (ofs, Pmode))
	    return true;
	}
    }

  return false;
}

/* Build the SYMBOL_REF for __tls_get_addr.  Cached in a GC-protected
   static so the symbol is created only once per compilation.  */

static GTY(()) rtx tls_get_addr_libfunc;

static rtx
get_tls_get_addr (void)
{
  if (!tls_get_addr_libfunc)
    tls_get_addr_libfunc = init_one_libfunc ("__tls_get_addr");
  return tls_get_addr_libfunc;
}

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.

   X is the address to legitimize; SCRATCH, if non-NULL, is a register
   that may be clobbered when new pseudos are unavailable; MODE is
   unused on this target.  Returns NULL if no transformation applies.  */

rtx
alpha_legitimize_address (rtx x, rtx scratch,
			  enum machine_mode mode ATTRIBUTE_UNUSED)
{
  HOST_WIDE_INT addend;

  /* If the address is (plus reg const_int) and the CONST_INT is not a
     valid offset, compute the high part of the constant and add it to
     the register.  Then our address is (plus temp low-part-const).  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ! CONSTANT_ADDRESS_P (XEXP (x, 1)))
    {
      addend = INTVAL (XEXP (x, 1));
      x = XEXP (x, 0);
      goto split_addend;
    }

  /* If the address is (const (plus FOO const_int)), find the low-order
     part of the CONST_INT.  Then load FOO plus any high-order part of the
     CONST_INT into a register.  Our address is (plus reg low-part-const).
     This is done to reduce the number of GOT entries.  */
  if (!no_new_pseudos
      && GET_CODE (x) == CONST
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (x, 0), 1));
      x = force_reg (Pmode, XEXP (XEXP (x, 0), 0));
      goto split_addend;
    }

  /* If we have a (plus reg const), emit the load as in (2), then add
     the two registers, and finally generate (plus reg low-part-const) as
     our address.  */
  if (!no_new_pseudos
      && GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == CONST
      && GET_CODE (XEXP (XEXP (x, 1), 0)) == PLUS
      && GET_CODE (XEXP (XEXP (XEXP (x, 1), 0), 1)) == CONST_INT)
    {
      addend = INTVAL (XEXP (XEXP (XEXP (x, 1), 0), 1));
      x = expand_simple_binop (Pmode, PLUS, XEXP (x, 0),
			       XEXP (XEXP (XEXP (x, 1), 0), 0),
			       NULL_RTX, 1, OPTAB_LIB_WIDEN);
      goto split_addend;
    }

  /* If this is a local symbol, split the address into HIGH/LO_SUM parts.
*/
  if (TARGET_EXPLICIT_RELOCS
      && symbolic_operand (x, Pmode))
    {
      rtx r0, r16, eqv, tga, tp, insn, dest, seq;

      switch (tls_symbolic_operand_type (x))
	{
	case TLS_MODEL_GLOBAL_DYNAMIC:
	  /* General dynamic: load the GOT entry with a tlsgd reloc and
	     call __tls_get_addr; the argument goes in $16 and the result
	     comes back in $0.  */
	  start_sequence ();

	  r0 = gen_rtx_REG (Pmode, 0);
	  r16 = gen_rtx_REG (Pmode, 16);
	  tga = get_tls_get_addr ();
	  dest = gen_reg_rtx (Pmode);
	  /* Sequence number ties the argument setup to the call for
	     the !samegp relaxation.  */
	  seq = GEN_INT (alpha_next_sequence_number++);

	  emit_insn (gen_movdi_er_tlsgd (r16, pic_offset_table_rtx, x, seq));
	  insn = gen_call_value_osf_tlsgd (r0, tga, seq);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);

	  insn = get_insns ();
	  end_sequence ();

	  emit_libcall_block (insn, dest, r0, x);
	  return dest;

	case TLS_MODEL_LOCAL_DYNAMIC:
	  /* Local dynamic: one __tls_get_addr call obtains the module
	     base (cached in SCRATCH), then the symbol is addressed at a
	     dtp-relative offset from it.  */
	  start_sequence ();

	  r0 = gen_rtx_REG (Pmode, 0);
	  r16 = gen_rtx_REG (Pmode, 16);
	  tga = get_tls_get_addr ();
	  scratch = gen_reg_rtx (Pmode);
	  seq = GEN_INT (alpha_next_sequence_number++);

	  emit_insn (gen_movdi_er_tlsldm (r16, pic_offset_table_rtx, seq));
	  insn = gen_call_value_osf_tlsldm (r0, tga, seq);
	  insn = emit_call_insn (insn);
	  CONST_OR_PURE_CALL_P (insn) = 1;
	  use_reg (&CALL_INSN_FUNCTION_USAGE (insn), r16);

	  insn = get_insns ();
	  end_sequence ();

	  /* An UNSPEC equivalence lets CSE share the module-base call
	     across references.  */
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
				UNSPEC_TLSLDM_CALL);
	  emit_libcall_block (insn, scratch, r0, eqv);

	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_DTPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);

	  if (alpha_tls_size == 64)
	    {
	      /* Full 64-bit dtp offset: materialize it and add.  */
	      dest = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, dest, eqv));
	      emit_insn (gen_adddi3 (dest, dest, scratch));
	      return dest;
	    }
	  if (alpha_tls_size == 32)
	    {
	      /* 32-bit offset: add the HIGH part first, LO_SUM below.  */
	      insn = gen_rtx_HIGH (Pmode, eqv);
	      insn = gen_rtx_PLUS (Pmode, scratch, insn);
	      scratch = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, scratch, insn));
	    }
	  return gen_rtx_LO_SUM (Pmode, scratch, eqv);

	case TLS_MODEL_INITIAL_EXEC:
	  /* Initial exec: load the tp-relative offset from the GOT and
	     add it to the thread pointer.  */
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);
	  tp = gen_reg_rtx (Pmode);
	  scratch = gen_reg_rtx (Pmode);
	  dest = gen_reg_rtx (Pmode);

	  emit_insn (gen_load_tp (tp));
	  emit_insn (gen_rtx_SET (VOIDmode, scratch, eqv));
	  emit_insn (gen_adddi3 (dest, tp, scratch));
	  return dest;

	case TLS_MODEL_LOCAL_EXEC:
	  /* Local exec: the tp-relative offset is a link-time constant;
	     address the symbol directly off the thread pointer.  */
	  eqv = gen_rtx_UNSPEC (Pmode, gen_rtvec (1, x), UNSPEC_TPREL);
	  eqv = gen_rtx_CONST (Pmode, eqv);
	  tp = gen_reg_rtx (Pmode);

	  emit_insn (gen_load_tp (tp));
	  if (alpha_tls_size == 32)
	    {
	      insn = gen_rtx_HIGH (Pmode, eqv);
	      insn = gen_rtx_PLUS (Pmode, tp, insn);
	      tp = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, tp, insn));
	    }
	  return gen_rtx_LO_SUM (Pmode, tp, eqv);
	}

      if (local_symbolic_operand (x, Pmode))
	{
	  if (small_symbolic_operand (x, Pmode))
	    return x;
	  else
	    {
	      if (!no_new_pseudos)
		scratch = gen_reg_rtx (Pmode);
	      emit_insn (gen_rtx_SET (VOIDmode, scratch,
				      gen_rtx_HIGH (Pmode, x)));
	      return gen_rtx_LO_SUM (Pmode, scratch, x);
	    }
	}
    }

  return NULL;

 split_addend:
  {
    HOST_WIDE_INT low, high;

    /* Sign-extend the low 16 bits, then the next 32, so each piece fits
       the signed 16-bit lda/ldah displacement fields.  */
    low = ((addend & 0xffff) ^ 0x8000) - 0x8000;
    addend -= low;
    high = ((addend & 0xffffffff) ^ 0x80000000) - 0x80000000;
    addend -= high;

    if (addend)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (addend),
			       (no_new_pseudos ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);
    if (high)
      x = expand_simple_binop (Pmode, PLUS, x, GEN_INT (high),
			       (no_new_pseudos ? scratch : NULL_RTX),
			       1, OPTAB_LIB_WIDEN);

    return plus_constant (x, low);
  }
}

/* Primarily this is required for TLS symbols, but given that our move
   patterns *ought* to be able to handle any symbol at any time, we
   should never be spilling symbolic operands to the constant pool,
   ever.  */

static bool
alpha_cannot_force_const_mem (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  return code == SYMBOL_REF || code == LABEL_REF || code == CONST;
}

/* We do not allow indirect calls to be optimized into sibling calls, nor
   can we allow a call to a function with a different GP to be optimized
   into a sibcall.  */

static bool
alpha_function_ok_for_sibcall (tree decl, tree exp ATTRIBUTE_UNUSED)
{
  /* Can't do indirect tail calls, since we don't know if the target
     uses the same GP.
*/
  if (!decl)
    return false;

  /* Otherwise, we can make a tail call if the target function shares
     the same GP.  */
  return decl_has_samegp (decl);
}

/* for_each_rtx callback: report whether *PX is a small symbolic operand
   that still needs splitting.  Returns -1 (stop descending) for LO_SUM,
   since those have already been split.  */

int
some_small_symbolic_operand_int (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  return small_symbolic_operand (x, Pmode) != 0;
}

/* for_each_rtx callback: rewrite a small symbolic operand in place as
   (lo_sum gp symbol).  Returns -1 to stop descending into subtrees that
   are already split or just were.  */

static int
split_small_symbolic_operand_1 (rtx *px, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *px;

  /* Don't re-split.  */
  if (GET_CODE (x) == LO_SUM)
    return -1;

  if (small_symbolic_operand (x, Pmode))
    {
      x = gen_rtx_LO_SUM (Pmode, pic_offset_table_rtx, x);
      *px = x;
      return -1;
    }

  return 0;
}

/* Return a copy of X with every small symbolic operand rewritten as a
   gp-relative LO_SUM.  X itself is not modified.  */

rtx
split_small_symbolic_operand (rtx x)
{
  x = copy_insn (x);
  for_each_rtx (&x, split_small_symbolic_operand_1, NULL);
  return x;
}

/* Indicate that INSN cannot be duplicated.  This is true for any insn
   that we've marked with gpdisp relocs, since those have to stay in
   1-1 correspondence with one another.

   Technically we could copy them if we could set up a mapping from one
   sequence number to another, across the set of insns to be duplicated.
   This seems overly complicated and error-prone since interblock motion
   from sched-ebb could move one of the pair of insns to a different
   block.

   Also cannot allow jsr insns to be duplicated.  If they throw
   exceptions, then they'll be in a different block from their ldgp.
   Which could lead the bb reorder code to think that it would be ok to
   copy just the block containing the call and branch to the block
   containing the ldgp.  */

static bool
alpha_cannot_copy_insn_p (rtx insn)
{
  if (!reload_completed || !TARGET_EXPLICIT_RELOCS)
    return false;
  if (recog_memoized (insn) >= 0)
    return get_attr_cannot_copy (insn);
  else
    return false;
}

/* Try a machine-dependent way of reloading an illegitimate address
   operand.  If we find one, push the reload and return the new rtx.  */

rtx
alpha_legitimize_reload_address (rtx x,
				 enum machine_mode mode ATTRIBUTE_UNUSED,
				 int opnum, int type,
				 int ind_levels ATTRIBUTE_UNUSED)
{
  /* We must recognize output that we have already generated ourselves.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == PLUS
      && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
      && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, type);
      return x;
    }

  /* We wish to handle large displacements off a base register by
     splitting the addend across an ldah and the mem insn.  This cuts
     number of extra insns needed from 3 to 1.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
      && REGNO_OK_FOR_BASE_P (REGNO (XEXP (x, 0)))
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    {
      HOST_WIDE_INT val = INTVAL (XEXP (x, 1));
      /* Low part: VAL sign-extended from 16 bits for the mem insn.  */
      HOST_WIDE_INT low = ((val & 0xffff) ^ 0x8000) - 0x8000;
      /* High part: the remainder sign-extended from 32 bits for ldah.  */
      HOST_WIDE_INT high
	= (((val - low) & 0xffffffff) ^ 0x80000000) - 0x80000000;

      /* Check for 32-bit overflow.  */
      if (high + low != val)
	return NULL_RTX;

      /* Reload the high part into a base reg; leave the low part
	 in the mem directly.  */
      x = gen_rtx_PLUS (GET_MODE (x),
			gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0),
				      GEN_INT (high)),
			GEN_INT (low));

      push_reload (XEXP (x, 0), NULL_RTX, &XEXP (x, 0), NULL,
		   BASE_REG_CLASS, GET_MODE (x), VOIDmode, 0, 0,
		   opnum, type);
      return x;
    }

  return NULL_RTX;
}

/* Compute a (partial) cost for rtx X.  Return true if the complete
   cost has been computed, and false if subexpressions should be
   scanned.  In either case, *TOTAL contains the cost result.
*/

static bool
alpha_rtx_costs (rtx x, int code, int outer_code, int *total)
{
  enum machine_mode mode = GET_MODE (x);
  bool float_mode_p = FLOAT_MODE_P (mode);
  const struct alpha_rtx_cost_data *cost_data;

  /* When optimizing for size, use a uniform cost table instead of the
     per-CPU latency table.  */
  if (optimize_size)
    cost_data = &alpha_rtx_cost_size;
  else
    cost_data = &alpha_rtx_cost_data[alpha_cpu];

  switch (code)
    {
    case CONST_INT:
      /* If this is an 8-bit constant, return zero since it can be used
	 nearly anywhere with no cost.  If it is a valid operand for an
	 ADD or AND, likewise return 0 if we know it will be used in that
	 context.  Otherwise, return 2 since it might be used there later.
	 All other constants take at least two insns.  */
      if (INTVAL (x) >= 0 && INTVAL (x) < 256)
	{
	  *total = 0;
	  return true;
	}
      /* FALLTHRU */

    case CONST_DOUBLE:
      if (x == CONST0_RTX (mode))
	*total = 0;
      else if ((outer_code == PLUS && add_operand (x, VOIDmode))
	       || (outer_code == AND && and_operand (x, VOIDmode)))
	*total = 0;
      else if (add_operand (x, VOIDmode) || and_operand (x, VOIDmode))
	*total = 2;
      else
	*total = COSTS_N_INSNS (2);
      return true;

    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      if (TARGET_EXPLICIT_RELOCS && small_symbolic_operand (x, VOIDmode))
	*total = COSTS_N_INSNS (outer_code != MEM);
      else if (TARGET_EXPLICIT_RELOCS && local_symbolic_operand (x, VOIDmode))
	*total = COSTS_N_INSNS (1 + (outer_code != MEM));
      else if (tls_symbolic_operand_type (x))
	/* Estimate of cost for call_pal rduniq.  */
	/* ??? How many insns do we emit here?  More than one...  */
	*total = COSTS_N_INSNS (15);
      else
	/* Otherwise we do a load from the GOT.  */
	*total = COSTS_N_INSNS (optimize_size ? 1 : alpha_memory_latency);
      return true;

    case HIGH:
      /* This is effectively an add_operand.  */
      *total = 2;
      return true;

      /* NOTE(review): the function continues past the end of this
	 fragment; remaining cases are not visible here.  */
    case PLUS:
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -