arm.c
	  i -= 2;
	}
    }

  /* Now start emitting the insns, starting with the one with the highest
     bit set: we do this so that the smallest number will be emitted last;
     this is more likely to be combinable with addressing insns.  */
  i = best_start;
  do
    {
      int end;

      if (i <= 0)
	i += 32;
      if (remainder & (3 << (i - 2)))
	{
	  end = i - 8;
	  if (end < 0)
	    end += 32;
	  temp1 = remainder & ((0x0ff << end)
			       | ((i < end) ? (0xff >> (32 - end)) : 0));
	  remainder &= ~temp1;

	  if (code == SET)
	    {
	      if (generate)
		emit_insn (gen_rtx (SET, VOIDmode,
				    new_src = (subtargets
					       ? gen_reg_rtx (mode)
					       : target),
				    GEN_INT (can_invert ? ~temp1 : temp1)));
	      can_invert = 0;
	      code = PLUS;
	    }
	  else if (code == MINUS)
	    {
	      if (generate)
		emit_insn (gen_rtx (SET, VOIDmode,
				    new_src = (subtargets
					       ? gen_reg_rtx (mode)
					       : target),
				    gen_rtx (code, mode, GEN_INT (temp1),
					     source)));
	      code = PLUS;
	    }
	  else
	    {
	      if (generate)
		emit_insn (gen_rtx (SET, VOIDmode,
				    new_src = (remainder
					       ? (subtargets
						  ? gen_reg_rtx (mode)
						  : target)
					       : target),
				    gen_rtx (code, mode, source,
					     GEN_INT (can_invert ? ~temp1
						      : (can_negate
							 ? -temp1
							 : temp1)))));
	    }

	  insns++;
	  source = new_src;
	  i -= 6;
	}
      i -= 2;
    }
  while (remainder);

  return insns;
}

/* Canonicalize a comparison so that we are more likely to recognize it.
   This can be done for a few constant compares, where we can make the
   immediate value easier to load.  */
enum rtx_code
arm_canonicalize_comparison (code, op1)
     enum rtx_code code;
     rtx *op1;
{
  HOST_WIDE_INT i = INTVAL (*op1);

  switch (code)
    {
    case EQ:
    case NE:
      return code;

    case GT:
    case LE:
      if (i != (1 << (HOST_BITS_PER_WIDE_INT - 1) - 1)
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (- (i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GT ? GE : LT;
	}
      break;

    case GE:
    case LT:
      if (i != (1 << (HOST_BITS_PER_WIDE_INT - 1))
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GE ? GT : LE;
	}
      break;

    case GTU:
    case LEU:
      if (i != ~0
	  && (const_ok_for_arm (i + 1) || const_ok_for_arm (- (i + 1))))
	{
	  *op1 = GEN_INT (i + 1);
	  return code == GTU ? GEU : LTU;
	}
      break;

    case GEU:
    case LTU:
      if (i != 0
	  && (const_ok_for_arm (i - 1) || const_ok_for_arm (- (i - 1))))
	{
	  *op1 = GEN_INT (i - 1);
	  return code == GEU ? GTU : LEU;
	}
      break;

    default:
      abort ();
    }

  return code;
}
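/* Example (illustrative): a compare such as (GT reg (const_int 0xfff))
   cannot use 0xfff directly, because it is not an 8-bit value rotated by
   an even amount, but 0x1000 is; assuming const_ok_for_arm accepts 0x1000,
   the function above rewrites the test as (GE reg (const_int 0x1000)).
   EQ and NE are returned unchanged, since adjusting the constant would
   change their meaning.  */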
/* Handle aggregates that are not laid out in a BLKmode element.
   This is a sub-element of RETURN_IN_MEMORY.  */
int
arm_return_in_memory (type)
     tree type;
{
  if (TREE_CODE (type) == RECORD_TYPE)
    {
      tree field;

      /* For a struct, we can return in a register if every element was a
	 bit-field.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	if (TREE_CODE (field) != FIELD_DECL
	    || ! DECL_BIT_FIELD_TYPE (field))
	  return 1;

      return 0;
    }
  else if (TREE_CODE (type) == UNION_TYPE)
    {
      tree field;

      /* Unions can be returned in registers if every element is integral,
	 or can be returned in an integer register.  */
      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	{
	  if (TREE_CODE (field) != FIELD_DECL
	      || (AGGREGATE_TYPE_P (TREE_TYPE (field))
		  && RETURN_IN_MEMORY (TREE_TYPE (field)))
	      || FLOAT_TYPE_P (TREE_TYPE (field)))
	    return 1;
	}

      return 0;
    }

  /* XXX Not sure what should be done for other aggregates, so put them in
     memory.  */
  return 1;
}

int
legitimate_pic_operand_p (x)
     rtx x;
{
  if (CONSTANT_P (x) && flag_pic
      && (GET_CODE (x) == SYMBOL_REF
	  || (GET_CODE (x) == CONST
	      && GET_CODE (XEXP (x, 0)) == PLUS
	      && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF)))
    return 0;

  return 1;
}

rtx
legitimize_pic_address (orig, mode, reg)
     rtx orig;
     enum machine_mode mode;
     rtx reg;
{
  if (GET_CODE (orig) == SYMBOL_REF)
    {
      rtx pic_ref, address;
      rtx insn;
      int subregs = 0;

      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);

	  subregs = 1;
	}

#ifdef AOF_ASSEMBLER
      /* The AOF assembler can generate relocations for these directly, and
	 understands that the PIC register has to be added into the
	 offset.  */
      insn = emit_insn (gen_pic_load_addr_based (reg, orig));
#else
      if (subregs)
	address = gen_reg_rtx (Pmode);
      else
	address = reg;

      emit_insn (gen_pic_load_addr (address, orig));
      pic_ref = gen_rtx (MEM, Pmode,
			 gen_rtx (PLUS, Pmode, pic_offset_table_rtx,
				  address));
      RTX_UNCHANGING_P (pic_ref) = 1;
      insn = emit_move_insn (reg, pic_ref);
#endif
      current_function_uses_pic_offset_table = 1;
      /* Put a REG_EQUAL note on this insn, so that it can be optimized by
	 loop.  */
      REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, orig,
				  REG_NOTES (insn));

      return reg;
    }
  else if (GET_CODE (orig) == CONST)
    {
      rtx base, offset;

      if (GET_CODE (XEXP (orig, 0)) == PLUS
	  && XEXP (XEXP (orig, 0), 0) == pic_offset_table_rtx)
	return orig;

      if (reg == 0)
	{
	  if (reload_in_progress || reload_completed)
	    abort ();
	  else
	    reg = gen_reg_rtx (Pmode);
	}

      if (GET_CODE (XEXP (orig, 0)) == PLUS)
	{
	  base = legitimize_pic_address (XEXP (XEXP (orig, 0), 0), Pmode,
					 reg);
	  offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
					   base == reg ? 0 : reg);
	}
      else
	abort ();

      if (GET_CODE (offset) == CONST_INT)
	{
	  /* The base register doesn't really matter, we only want to test
	     the index for the appropriate mode.  */
	  GO_IF_LEGITIMATE_INDEX (mode, 0, offset, win);

	  if (! reload_in_progress && ! reload_completed)
	    offset = force_reg (Pmode, offset);
	  else
	    abort ();

	win:
	  if (GET_CODE (offset) == CONST_INT)
	    return plus_constant_for_output (base, INTVAL (offset));
	}

      if (GET_MODE_SIZE (mode) > 4
	  && (GET_MODE_CLASS (mode) == MODE_INT
	      || TARGET_SOFT_FLOAT))
	{
	  emit_insn (gen_addsi3 (reg, base, offset));
	  return reg;
	}

      return gen_rtx (PLUS, Pmode, base, offset);
    }
  else if (GET_CODE (orig) == LABEL_REF)
    current_function_uses_pic_offset_table = 1;

  return orig;
}

static rtx pic_rtx;

int
is_pic (x)
     rtx x;
{
  if (x == pic_rtx)
    return 1;
  return 0;
}
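/* Rough sketch of what legitimize_pic_address emits for a plain SYMBOL_REF
   in the non-AOF case (illustrative only; register names are arbitrary
   placeholders):

       (set (reg:SI Rtmp) ... "sym")                          ; pic_load_addr
       (set (reg:SI Rdst)
	    (mem:SI (plus:SI (reg:SI PIC) (reg:SI Rtmp))))    ; GOT load

   i.e. the symbol's offset into the global offset table is loaded first,
   then the symbol's address is fetched via the PIC base register, and a
   REG_EQUAL note naming the original symbol is attached so later passes
   can still see what was loaded.  */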
void
arm_finalize_pic ()
{
#ifndef AOF_ASSEMBLER
  rtx l1, pic_tmp, pic_tmp2, seq;
  rtx global_offset_table;

  if (current_function_uses_pic_offset_table == 0)
    return;

  if (! flag_pic)
    abort ();

  start_sequence ();
  l1 = gen_label_rtx ();

  global_offset_table = gen_rtx (SYMBOL_REF, Pmode, "_GLOBAL_OFFSET_TABLE_");
  /* The PC contains 'dot'+8, but the label L1 is on the next instruction,
     so the offset is only 'dot'+4.  */
  pic_tmp = gen_rtx (CONST, VOIDmode,
		     gen_rtx (PLUS, Pmode,
			      gen_rtx (LABEL_REF, VOIDmode, l1),
			      GEN_INT (4)));
  pic_tmp2 = gen_rtx (CONST, VOIDmode,
		      gen_rtx (PLUS, Pmode, global_offset_table, pc_rtx));

  pic_rtx = gen_rtx (CONST, Pmode,
		     gen_rtx (MINUS, Pmode, pic_tmp2, pic_tmp));

  emit_insn (gen_pic_load_addr (pic_offset_table_rtx, pic_rtx));
  emit_jump_insn (gen_pic_add_dot_plus_eight (l1, pic_offset_table_rtx));
  emit_label (l1);

  seq = gen_sequence ();
  end_sequence ();
  emit_insn_after (seq, get_insns ());

  /* Need to emit this whether or not we obey regdecls, since setjmp/longjmp
     can cause life info to screw up.  */
  emit_insn (gen_rtx (USE, VOIDmode, pic_offset_table_rtx));
#endif /* AOF_ASSEMBLER */
}

#define REG_OR_SUBREG_REG(X)						\
  (GET_CODE (X) == REG							\
   || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))

#define REG_OR_SUBREG_RTX(X)			\
  (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))

#define ARM_FRAME_RTX(X)					\
  ((X) == frame_pointer_rtx || (X) == stack_pointer_rtx		\
   || (X) == arg_pointer_rtx)
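/* Worked example of the MEM costing below (illustrative): with
   UNITS_PER_WORD == 4, an SImode load costs 10 + 4 * ((4 - 1) / 4) = 10
   and a DImode load costs 10 + 4 * ((8 - 1) / 4) = 14, with a further 4
   added for a constant-pool reference; DIV and MOD get a flat cost of 100,
   since these ARMs have no hardware divide instruction.  */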
int
arm_rtx_costs (x, code, outer_code)
     rtx x;
     enum rtx_code code, outer_code;
{
  enum machine_mode mode = GET_MODE (x);
  enum rtx_code subcode;
  int extra_cost;

  switch (code)
    {
    case MEM:
      /* Memory costs quite a lot for the first word, but subsequent words
	 load at the equivalent of a single insn each.  */
      return (10 + 4 * ((GET_MODE_SIZE (mode) - 1) / UNITS_PER_WORD)
	      + (CONSTANT_POOL_ADDRESS_P (x) ? 4 : 0));

    case DIV:
    case MOD:
      return 100;

    case ROTATE:
      if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
	return 4;
      /* Fall through */
    case ROTATERT:
      if (mode != SImode)
	return 8;
      /* Fall through */
    case ASHIFT: case LSHIFTRT: case ASHIFTRT:
      if (mode == DImode)
	return (8 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : 8)
		+ ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 8));
      return (1 + ((GET_CODE (XEXP (x, 0)) == REG
		    || (GET_CODE (XEXP (x, 0)) == SUBREG
			&& GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG))
		   ? 0 : 4)
	      + ((GET_CODE (XEXP (x, 1)) == REG
		  || (GET_CODE (XEXP (x, 1)) == SUBREG
		      && GET_CODE (SUBREG_REG (XEXP (x, 1))) == REG)
		  || (GET_CODE (XEXP (x, 1)) == CONST_INT))
		 ? 0 : 4));

    case MINUS:
      if (mode == DImode)
	return (4 + (REG_OR_SUBREG_REG (XEXP (x, 1)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_INT
			&& const_ok_for_arm (INTVAL (XEXP (x, 0)))))
		   ? 0 : 8));

      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + ((REG_OR_SUBREG_REG (XEXP (x, 1))
		      || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			  && const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		     ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 0))
		    || (GET_CODE (XEXP (x, 0)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 0))))
		   ? 0 : 8));

      if (((GET_CODE (XEXP (x, 0)) == CONST_INT
	    && const_ok_for_arm (INTVAL (XEXP (x, 0)))
	    && REG_OR_SUBREG_REG (XEXP (x, 1))))
	  || (((subcode = GET_CODE (XEXP (x, 1))) == ASHIFT
	       || subcode == ASHIFTRT || subcode == LSHIFTRT
	       || subcode == ROTATE || subcode == ROTATERT
	       || (subcode == MULT
		   && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
		   && ((INTVAL (XEXP (XEXP (x, 1), 1))
			& (INTVAL (XEXP (XEXP (x, 1), 1)) - 1)) == 0)))
	      && REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 0))
	      && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 1), 1))
		  || GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT)
	      && REG_OR_SUBREG_REG (XEXP (x, 0))))
	return 1;
      /* Fall through */

    case PLUS:
      if (GET_MODE_CLASS (mode) == MODE_FLOAT)
	return (2 + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_DOUBLE
			&& const_double_rtx_ok_for_fpu (XEXP (x, 1))))
		   ? 0 : 8));
      /* Fall through */

    case AND: case XOR: case IOR:
      extra_cost = 0;

      /* Normally the frame registers will be spilt into reg+const during
	 reload, so it is a bad idea to combine them with other
	 instructions, since then they might not be moved outside of loops.
	 As a compromise we allow integration with ops that have a constant
	 as their second operand.  */
      if ((REG_OR_SUBREG_REG (XEXP (x, 0))
	   && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
	   && GET_CODE (XEXP (x, 1)) != CONST_INT)
	  || (REG_OR_SUBREG_REG (XEXP (x, 0))
	      && ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
	extra_cost = 4;

      if (mode == DImode)
	return (4 + extra_cost + (REG_OR_SUBREG_REG (XEXP (x, 0)) ? 0 : 8)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 8));

      if (REG_OR_SUBREG_REG (XEXP (x, 0)))
	return (1 + (GET_CODE (XEXP (x, 1)) == CONST_INT ? 0 : extra_cost)
		+ ((REG_OR_SUBREG_REG (XEXP (x, 1))
		    || (GET_CODE (XEXP (x, 1)) == CONST_INT
			&& const_ok_for_op (INTVAL (XEXP (x, 1)), code, mode)))
		   ? 0 : 4));
      else if (REG_OR_SUBREG_REG (XEXP (x, 1)))
	return (1 + extra_cost
		+ ((((subcode = GET_CODE (XEXP (x, 0))) == ASHIFT
		     || subcode == LSHIFTRT || subcode == ASHIFTRT
		     || subcode == ROTATE || subcode == ROTATERT
		     || (subcode == MULT
			 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
			 && ((INTVAL (XEXP (XEXP (x, 0), 1))
			      & (INTVAL (XEXP (XEXP (x, 0), 1)) - 1)) == 0))
		    && (REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 0)))
		    && ((REG_OR_SUBREG_REG (XEXP (XEXP (x, 0), 1)))
			|| GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))