📄 pa.c
字号:
#if TARGET_HPUX_11_11
    case OPT_munix_98:
      flag_pa_unix = 1998;
      return true;
#endif

    default:
      return true;
    }
}

/* Validate and adjust the target flags after all command-line options
   have been processed: warn about option combinations the PA back end
   cannot support, force the "big PIC" model where required, and drop
   assembler features that need GAS when GAS is not in use.  */
void
override_options (void)
{
  /* Unconditional branches in the delay slot are not compatible with
     dwarf2 call frame information.  There is no benefit in using this
     optimization on PA8000 and later processors.  */
  if (pa_cpu >= PROCESSOR_8000
      || (! USING_SJLJ_EXCEPTIONS && flag_exceptions)
      || flag_unwind_tables)
    target_flags &= ~MASK_JUMP_IN_DELAY;

  if (flag_pic && TARGET_PORTABLE_RUNTIME)
    {
      warning (0, "PIC code generation is not supported in the portable runtime model");
    }

  if (flag_pic && TARGET_FAST_INDIRECT_CALLS)
    {
      warning (0, "PIC code generation is not compatible with fast indirect calls");
    }

  /* Debug info requires GAS on this target; warn and disable -g.  */
  if (! TARGET_GAS && write_symbols != NO_DEBUG)
    {
      warning (0, "-g is only supported when using GAS on this processor,");
      warning (0, "-g option disabled");
      write_symbols = NO_DEBUG;
    }

  /* We only support the "big PIC" model now.  And we always generate
     PIC code when in 64bit mode.  */
  if (flag_pic == 1 || TARGET_64BIT)
    flag_pic = 2;

  /* We can't guarantee that .dword is available for 32-bit targets.  */
  if (UNITS_PER_WORD == 4)
    targetm.asm_out.aligned_op.di = NULL;

  /* The unaligned ops are only available when using GAS.  */
  if (!TARGET_GAS)
    {
      targetm.asm_out.unaligned_op.hi = NULL;
      targetm.asm_out.unaligned_op.si = NULL;
      targetm.asm_out.unaligned_op.di = NULL;
    }

  init_machine_status = pa_init_machine_status;
}

/* Target hook to adjust builtins.  When the runtime lacks
   fputc_unlocked, remove the builtin so the compiler never emits
   calls to it.  */
static void
pa_init_builtins (void)
{
#ifdef DONT_HAVE_FPUTC_UNLOCKED
  built_in_decls[(int) BUILT_IN_FPUTC_UNLOCKED] = NULL_TREE;
  implicit_built_in_decls[(int) BUILT_IN_FPUTC_UNLOCKED] = NULL_TREE;
#endif
}

/* Function to init struct machine_function.
   This will be called, via a pointer variable,
   from push_function_context.  Returns a zeroed, GC-allocated
   machine_function.  */
static struct machine_function *
pa_init_machine_status (void)
{
  return ggc_alloc_cleared (sizeof (machine_function));
}

/* If FROM is a probable pointer register, mark TO as a probable
   pointer register with the same pointer alignment as FROM.
 */
static void
copy_reg_pointer (rtx to, rtx from)
{
  if (REG_POINTER (from))
    mark_reg_pointer (to, REGNO_POINTER_ALIGN (REGNO (from)));
}

/* Return 1 if X contains a symbolic expression.  We know these
   expressions will have one of a few well defined forms, so
   we need only check those forms.  */
int
symbolic_expression_p (rtx x)
{
  /* Strip off any HIGH.  */
  if (GET_CODE (x) == HIGH)
    x = XEXP (x, 0);

  return (symbolic_operand (x, VOIDmode));
}

/* Accept any constant that can be moved in one instruction into a
   general register.  */
int
cint_ok_for_move (HOST_WIDE_INT intval)
{
  /* OK if ldo, ldil, or zdepi, can be used.  */
  return (CONST_OK_FOR_LETTER_P (intval, 'J')
	  || CONST_OK_FOR_LETTER_P (intval, 'N')
	  || CONST_OK_FOR_LETTER_P (intval, 'K'));
}

/* Return truth value of whether OP can be used as an operand in a
   adddi3 insn.  Registers always qualify; constant operands must fit
   the immediate field (14 bits on 64-bit targets, 11 bits otherwise).  */
int
adddi3_operand (rtx op, enum machine_mode mode)
{
  return (register_operand (op, mode)
	  || (GET_CODE (op) == CONST_INT
	      && (TARGET_64BIT ? INT_14_BITS (op) : INT_11_BITS (op))));
}

/* True iff zdepi can be used to generate this CONST_INT.
   zdepi first sign extends a 5 bit signed number to a given field
   length, then places this field anywhere in a zero.  */
int
zdepi_cint_p (unsigned HOST_WIDE_INT x)
{
  unsigned HOST_WIDE_INT lsb_mask, t;

  /* This might not be obvious, but it's at least fast.
     This function is critical; we don't have the time loops would take.  */
  lsb_mask = x & -x;
  t = ((x >> 4) + lsb_mask) & ~(lsb_mask - 1);
  /* Return true iff t is a power of two.  */
  return ((t & (t - 1)) == 0);
}

/* True iff depi or extru can be used to compute (reg & mask).
   Accept bit pattern like these:
	0....01....1
	1....10....0
	1..10..01..1  */
int
and_mask_p (unsigned HOST_WIDE_INT mask)
{
  /* The complement of an acceptable mask must be a contiguous run of
     one bits; adding its lowest set bit then yields a power of two.  */
  mask = ~mask;
  mask += mask & -mask;
  return (mask & (mask - 1)) == 0;
}

/* True iff depi can be used to compute (reg | MASK),
   i.e. MASK is a single contiguous run of one bits.  */
int
ior_mask_p (unsigned HOST_WIDE_INT mask)
{
  mask += mask & -mask;
  return (mask & (mask - 1)) == 0;
}

/* Legitimize PIC addresses.
   If the address is already position-independent, we return ORIG.
   Newly generated position-independent addresses go to REG.  If we need
   more than one register, we lose.  */
rtx
legitimize_pic_address (rtx orig, enum machine_mode mode, rtx reg)
{
  rtx pic_ref = orig;

  /* TLS references must be routed through legitimize_tls_address,
     never through here.  */
  gcc_assert (!PA_SYMBOL_REF_TLS_P (orig));

  /* Labels need special handling.  */
  if (pic_label_operand (orig, mode))
    {
      /* We do not want to go through the movXX expanders here since that
	 would create recursion.

	 Nor do we really want to call a generator for a named pattern
	 since that requires multiple patterns if we want to support
	 multiple word sizes.

	 So instead we just emit the raw set, which avoids the movXX
	 expanders completely.  */
      mark_reg_pointer (reg, BITS_PER_UNIT);
      emit_insn (gen_rtx_SET (VOIDmode, reg, orig));
      current_function_uses_pic_offset_table = 1;
      return reg;
    }
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx insn, tmp_reg;

      gcc_assert (reg);

      /* Before reload, allocate a temporary register for the intermediate
	 result.  This allows the sequence to be deleted when the final
	 result is unused and the insns are trivially dead.  */
      tmp_reg = ((reload_in_progress || reload_completed)
		 ? reg : gen_reg_rtx (Pmode));

      /* Load HIGH part relative to the PIC offset table, then form the
	 DLT indirect reference with a LO_SUM of an UNSPEC.  */
      emit_move_insn (tmp_reg,
		      gen_rtx_PLUS (word_mode, pic_offset_table_rtx,
				    gen_rtx_HIGH (word_mode, orig)));
      pic_ref
	= gen_const_mem (Pmode,
			 gen_rtx_LO_SUM (Pmode, tmp_reg,
					 gen_rtx_UNSPEC (Pmode,
							 gen_rtvec (1, orig),
							 UNSPEC_DLTIND14R)));

      current_function_uses_pic_offset_table = 1;
      mark_reg_pointer (reg, BITS_PER_UNIT);
      insn = emit_move_insn (reg, pic_ref);

      /* Put a REG_EQUAL note on this insn, so that it can be optimized.
	 */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, orig,
					    REG_NOTES (insn));

      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base;

      /* Already legitimized against the PIC register?  Nothing to do.  */
      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      gcc_assert (reg);
      gcc_assert (GET_CODE (XEXP (orig, 0)) == PLUS);

      /* Legitimize both operands of the PLUS, reusing REG for the
	 second only when the first did not consume it.  */
      base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode, reg);
      orig = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
				     base == reg ? 0 : reg);

      if (GET_CODE (orig) == CONST_INT)
	{
	  if (INT_14_BITS (orig))
	    return plus_constant (base, INTVAL (orig));
	  orig = force_reg (Pmode, orig);
	}
      pic_ref = gen_rtx_PLUS (Pmode, base, orig);
      /* Likewise, should we set special REG_NOTEs here?  */
    }

  return pic_ref;
}

/* Cached libfunc rtx for __tls_get_addr; created lazily and rooted
   for garbage collection.  */
static GTY(()) rtx gen_tls_tga;

static rtx
gen_tls_get_addr (void)
{
  if (!gen_tls_tga)
    gen_tls_tga = init_one_libfunc ("__tls_get_addr");
  return gen_tls_tga;
}

/* Emit a call to __tls_get_addr with argument ARG and return the
   pseudo register holding the result.  */
static rtx
hppa_tls_call (rtx arg)
{
  rtx ret;

  ret = gen_reg_rtx (Pmode);
  emit_library_call_value (gen_tls_get_addr (), ret,
			   LCT_CONST, Pmode, 1, arg, Pmode);

  return ret;
}

/* Expand the TLS symbol reference ADDR into an address computation
   appropriate for its access model (global/local dynamic,
   initial/local exec) and return the result register.  */
static rtx
legitimize_tls_address (rtx addr)
{
  rtx ret, insn, tmp, t1, t2, tp;
  enum tls_model model = SYMBOL_REF_TLS_MODEL (addr);

  switch (model)
    {
      case TLS_MODEL_GLOBAL_DYNAMIC:
	tmp = gen_reg_rtx (Pmode);
	emit_insn (gen_tgd_load (tmp, addr));
	ret = hppa_tls_call (tmp);
	break;

      case TLS_MODEL_LOCAL_DYNAMIC:
	ret = gen_reg_rtx (Pmode);
	tmp = gen_reg_rtx (Pmode);
	start_sequence ();
	emit_insn (gen_tld_load (tmp, addr));
	t1 = hppa_tls_call (tmp);
	insn = get_insns ();
	end_sequence ();
	t2 = gen_reg_rtx (Pmode);
	/* Wrap the call sequence in a libcall block so the module base
	   lookup can be CSEd across references.  */
	emit_libcall_block (insn, t2, t1,
			    gen_rtx_UNSPEC (Pmode, gen_rtvec (1, const0_rtx),
					    UNSPEC_TLSLDBASE));
	emit_insn (gen_tld_offset_load (ret, addr, t2));
	break;

      case TLS_MODEL_INITIAL_EXEC:
	tp = gen_reg_rtx (Pmode);
	tmp = gen_reg_rtx (Pmode);
	ret = gen_reg_rtx (Pmode);
	emit_insn (gen_tp_load (tp));
	emit_insn (gen_tie_load (tmp, addr));
	emit_move_insn (ret, gen_rtx_PLUS (Pmode, tp, tmp));
	break;

      case TLS_MODEL_LOCAL_EXEC:
	tp = gen_reg_rtx (Pmode);
	ret = gen_reg_rtx (Pmode);
	emit_insn (gen_tp_load (tp));
	emit_insn (gen_tle_load (ret, addr, tp));
	break;

      default:
	gcc_unreachable ();
    }

  return ret;
}

/* Try machine-dependent ways of modifying an illegitimate address
   to be legitimate.  If we find one, return the new, valid address.
   This macro is used in only one place: `memory_address' in explow.c.

   OLDX is the address as it was before break_out_memory_refs was called.
   In some cases it is useful to look at this to decide what needs to be
   done.

   MODE and WIN are passed so that this macro can use
   GO_IF_LEGITIMATE_ADDRESS.

   It is always safe for this macro to do nothing.  It exists to recognize
   opportunities to optimize the output.

   For the PA, transform:

	memory(X + <large int>)

   into:

	if (<large int> & mask) >= 16
	  Y = (<large int> & ~mask) + mask + 1	Round up.
	else
	  Y = (<large int> & ~mask)		Round down.
	Z = X + Y
	memory (Z + (<large int> - Y));

   This is for CSE to find several similar references, and only use one Z.

   X can either be a SYMBOL_REF or REG, but because combine cannot
   perform a 4->2 combination we do nothing for SYMBOL_REF + D where
   D will not fit in 14 bits.

   MODE_FLOAT references allow displacements which fit in 5 bits, so use
   0x1f as the mask.

   MODE_INT references allow displacements which fit in 14 bits, so use
   0x3fff as the mask.

   This relies on the fact that most mode MODE_FLOAT references will use FP
   registers and most mode MODE_INT references will use integer registers.
   (In the rare case of an FP register used in an integer MODE, we depend
   on secondary reloads to clean things up.)


   It is also beneficial to handle (plus (mult (X) (Y)) (Z)) in a special
   manner if Y is 2, 4, or 8.  (allows more shadd insns and shifted indexed
   addressing modes to be used).

   Put X and Z into registers.  Then put the entire expression into
   a register.
 */
rtx
hppa_legitimize_address (rtx x, rtx oldx ATTRIBUTE_UNUSED,
			 enum machine_mode mode)
{
  rtx orig = x;

  /* We need to canonicalize the order of operands in unscaled indexed
     addresses since the code that checks if an address is valid doesn't
     always try both orders.  */
  if (!TARGET_NO_SPACE_REGS
      && GET_CODE (x) == PLUS
      && GET_MODE (x) == Pmode
      && REG_P (XEXP (x, 0))
      && REG_P (XEXP (x, 1))
      && REG_POINTER (XEXP (x, 0))
      && !REG_POINTER (XEXP (x, 1)))
    return gen_rtx_PLUS (Pmode, XEXP (x, 1), XEXP (x, 0));

  /* TLS and PIC references each take their own legitimization path.  */
  if (PA_SYMBOL_REF_TLS_P (x))
    return legitimize_tls_address (x);
  else if (flag_pic)
    return legitimize_pic_address (x, mode, gen_reg_rtx (Pmode));

  /* Strip off CONST.  */
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  /* Special case.  Get the SYMBOL_REF into a register and use indexing.
     That should always be safe.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 0)) == REG
      && GET_CODE (XEXP (x, 1)) == SYMBOL_REF)
    {
      rtx reg = force_reg (Pmode, XEXP (x, 1));
      return force_reg (Pmode, gen_rtx_PLUS (Pmode, reg, XEXP (x, 0)));
    }

  /* Note we must reject symbols which represent function addresses
     since the assembler/linker can't handle arithmetic on plabels.  */
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT
      && ((GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	   && !FUNCTION_NAME_P (XSTR (XEXP (x, 0), 0)))
	  || GET_CODE (XEXP (x, 0)) == REG))
    {
      rtx int_part, ptr_reg;
      int newoffset;
      int offset = INTVAL (XEXP (x, 1));
      int mask;

      /* FP references take 5-bit displacements pre-PA2.0; integer
	 references take 14-bit displacements.  */
      mask = (GET_MODE_CLASS (mode) == MODE_FLOAT
	      ? (TARGET_PA_20 ? 0x3fff : 0x1f) : 0x3fff);

      /* Choose which way to round the offset.  Round up if we
	 are >= halfway to the next boundary.  */
      if ((offset & mask) >= ((mask + 1) / 2))
	newoffset = (offset & ~ mask) + mask + 1;
      else
	newoffset = (offset & ~ mask);

      /* If the newoffset will not fit in 14 bits (ldo), then
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -