mt.c
  /* If there is nothing to save, get out now.  */
  if (! info.save_fp && ! info.save_lr && ! reg_mask)
    return;

  /* If offset doesn't fit in a 15-bit signed integer,
     use a scratch register to get a smaller offset.  */
  if (CONST_OK_FOR_LETTER_P (offset, 'O'))
    base_reg = stack_pointer_rtx;
  else
    {
      /* Use the scratch register R9 that holds the old stack pointer.  */
      base_reg = gen_rtx_REG (SImode, GPR_R9);
      offset = 0;
    }

  if (info.save_fp)
    {
      /* This just records the space for it; the actual move is generated
         in mt_emit_save_fp ().  */
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
    }

  if (info.save_lr)
    {
      offset -= UNITS_PER_WORD;
      stack_offset -= UNITS_PER_WORD;
      mt_emit_save_restore (direction, gen_rtx_REG (SImode, GPR_LINK),
                            gen_rtx_MEM (SImode,
                                         gen_rtx_PLUS (SImode, base_reg,
                                                       GEN_INT (offset))),
                            stack_offset);
    }

  /* Save any needed call-saved regs.  */
  for (regno = GPR_R0; regno <= GPR_LAST; regno++)
    {
      if ((reg_mask & (1 << regno)) != 0)
        {
          offset -= UNITS_PER_WORD;
          stack_offset -= UNITS_PER_WORD;
          mt_emit_save_restore (direction, gen_rtx_REG (SImode, regno),
                                gen_rtx_MEM (SImode,
                                             gen_rtx_PLUS (SImode, base_reg,
                                                           GEN_INT (offset))),
                                stack_offset);
        }
    }
}

/* Return true if FUNC is a function with the 'interrupt' attribute.  */
static bool
mt_interrupt_function_p (tree func)
{
  tree a;

  if (TREE_CODE (func) != FUNCTION_DECL)
    return false;

  a = lookup_attribute ("interrupt", DECL_ATTRIBUTES (func));
  return a != NULL_TREE;
}

/* Generate prologue code.  */
void
mt_expand_prologue (void)
{
  rtx size_rtx, insn;
  unsigned int frame_size;

  if (mt_interrupt_function_p (current_function_decl))
    {
      interrupt_handler = 1;
      if (cfun->machine)
        cfun->machine->interrupt_handler = 1;
    }

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (&current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the frame size doesn't fit in a 15-bit signed immediate,
     build it in a scratch register.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
                                    GEN_INT (frame_size & 0x0000ffff)));
    }

  /* Allocate stack for this frame.  Make the stack adjustment, using the
     scratch register if the constant is too large to fit as an immediate.  */
  if (frame_size)
    {
      insn = emit_insn (gen_subsi3 (stack_pointer_rtx,
                                    stack_pointer_rtx,
                                    size_rtx));
      RTX_FRAME_RELATED_P (insn) = 1;
      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                             gen_rtx_SET (VOIDmode,
                                          stack_pointer_rtx,
                                          gen_rtx_MINUS (SImode,
                                                         stack_pointer_rtx,
                                                         GEN_INT (frame_size))),
                             REG_NOTES (insn));
    }

  /* Set R9 to point to the old sp if required for access to the
     register save area.  */
  if (current_frame_info.reg_size != 0
      && !CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));

  /* Save the frame pointer.  */
  mt_emit_save_fp (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* Now put the frame pointer into the frame pointer register.  */
  if (frame_pointer_needed)
    {
      insn = emit_move_insn (frame_pointer_rtx, stack_pointer_rtx);
      RTX_FRAME_RELATED_P (insn) = 1;
    }

  /* Save the registers.  */
  mt_emit_save_regs (FROM_PROCESSOR_TO_MEM, current_frame_info);

  /* If we are profiling, make sure no instructions are scheduled before
     the call to mcount.  */
  if (profile_flag)
    emit_insn (gen_blockage ());
}
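/* Illustrative walk-through (an assumed example, not from the original
   source): with a hypothetical frame_size of 0x12345, which is too large
   for the 15-bit signed 'O' constraint, the prologue above loads 0x10000
   into the scratch register R9, ORs in 0x2345, and subtracts R9 from sp.
   When registers need saving, R9 then has the new sp added back into it,
   so it ends up holding the old stack pointer and serves as the base of
   the register save area used by mt_emit_save_regs.  */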
/* Implement EPILOGUE_USES.  */
int
mt_epilogue_uses (int regno)
{
  if (cfun->machine && cfun->machine->interrupt_handler && reload_completed)
    return 1;
  return regno == GPR_LINK;
}

/* Generate the epilogue.  EH_MODE is NORMAL_EPILOGUE when generating a
   function epilogue, or EH_EPILOGUE when generating an EH epilogue.  */
void
mt_expand_epilogue (enum epilogue_type eh_mode)
{
  rtx size_rtx, insn;
  unsigned frame_size;

  mt_compute_frame_size (get_frame_size ());

  if (TARGET_DEBUG_STACK)
    mt_debug_stack (&current_frame_info);

  /* Compute size of stack adjustment.  */
  frame_size = current_frame_info.total_size;

  /* If the frame size doesn't fit in a 15-bit signed immediate,
     build it in a scratch register.  */
  if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
    size_rtx = GEN_INT (frame_size);
  else
    {
      /* We do not have any scratch registers.  */
      gcc_assert (!interrupt_handler);

      size_rtx = gen_rtx_REG (SImode, GPR_R9);
      insn = emit_move_insn (size_rtx, GEN_INT (frame_size & 0xffff0000));
      insn = emit_insn (gen_iorsi3 (size_rtx, size_rtx,
                                    GEN_INT (frame_size & 0x0000ffff)));
      /* Set R9 to point to the old sp if required for access to the
         register save area.  */
      emit_insn (gen_addsi3 (size_rtx, size_rtx, stack_pointer_rtx));
    }

  /* Restore sp if there was some possible change to it.  */
  if (frame_pointer_needed)
    insn = emit_move_insn (stack_pointer_rtx, frame_pointer_rtx);

  /* Restore the registers.  */
  mt_emit_save_fp (FROM_MEM_TO_PROCESSOR, current_frame_info);
  mt_emit_save_regs (FROM_MEM_TO_PROCESSOR, current_frame_info);

  /* Make the stack adjustment, using the scratch register if the constant
     is too large to fit as an immediate.  */
  if (frame_size)
    {
      if (CONST_OK_FOR_LETTER_P (frame_size, 'O'))
        /* Can handle this with a simple add.  */
        insn = emit_insn (gen_addsi3 (stack_pointer_rtx,
                                      stack_pointer_rtx,
                                      size_rtx));
      else
        /* Scratch reg R9 has the old sp value.  */
        insn = emit_move_insn (stack_pointer_rtx,
                               gen_rtx_REG (SImode, GPR_R9));

      REG_NOTES (insn)
        = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR,
                             gen_rtx_SET (VOIDmode,
                                          stack_pointer_rtx,
                                          gen_rtx_PLUS (SImode,
                                                        stack_pointer_rtx,
                                                        GEN_INT (frame_size))),
                             REG_NOTES (insn));
    }

  if (cfun->machine && cfun->machine->eh_stack_adjust != NULL_RTX)
    /* Perform the additional bump for __throw.  */
    emit_insn (gen_addsi3 (stack_pointer_rtx, stack_pointer_rtx,
                           cfun->machine->eh_stack_adjust));

  /* Generate the appropriate return.  */
  if (eh_mode == EH_EPILOGUE)
    {
      emit_jump_insn (gen_eh_return_internal ());
      emit_barrier ();
    }
  else if (interrupt_handler)
    emit_jump_insn (gen_return_interrupt_internal ());
  else
    emit_jump_insn (gen_return_internal ());

  /* Reset state info for each function.  */
  interrupt_handler = 0;
  current_frame_info = zero_frame_info;
  if (cfun->machine)
    cfun->machine->eh_stack_adjust = NULL_RTX;
}

/* Generate code for the "eh_return" pattern.  */
void
mt_expand_eh_return (rtx * operands)
{
  if (GET_CODE (operands[0]) != REG
      || REGNO (operands[0]) != EH_RETURN_STACKADJ_REGNO)
    {
      rtx sp = EH_RETURN_STACKADJ_RTX;

      emit_move_insn (sp, operands[0]);
      operands[0] = sp;
    }

  emit_insn (gen_eh_epilogue (operands[0]));
}

/* Generate code for the "eh_epilogue" pattern.  */
void
mt_emit_eh_epilogue (rtx * operands ATTRIBUTE_UNUSED)
{
  cfun->machine->eh_stack_adjust = EH_RETURN_STACKADJ_RTX; /* operands[0]; */
  mt_expand_epilogue (EH_EPILOGUE);
}
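/* Usage sketch (an assumed example, not part of the original source): a
   routine that should receive the interrupt prologue/epilogue treatment
   above is declared with the machine attribute handled below, e.g.

       void my_isr (void) __attribute__ ((interrupt));

   The attribute takes no arguments and is only accepted on functions; the
   name "my_isr" is hypothetical.  */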
/* Handle an "interrupt" attribute.  */
static tree
mt_handle_interrupt_attribute (tree *node,
                               tree name,
                               tree args ATTRIBUTE_UNUSED,
                               int flags ATTRIBUTE_UNUSED,
                               bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes,
               "%qs attribute only applies to functions",
               IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Table of machine attributes.  */
const struct attribute_spec mt_attribute_table[] =
{
  /* name,       min, max, decl?, type?, func?, handler  */
  { "interrupt", 0,   0,   false, false, false, mt_handle_interrupt_attribute },
  { NULL,        0,   0,   false, false, false, NULL }
};

/* Implement INITIAL_ELIMINATION_OFFSET.  */
int
mt_initial_elimination_offset (int from, int to)
{
  mt_compute_frame_size (get_frame_size ());

  if (from == FRAME_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return 0;
  else if (from == ARG_POINTER_REGNUM && to == STACK_POINTER_REGNUM)
    return current_frame_info.total_size;
  else if (from == ARG_POINTER_REGNUM && to == FRAME_POINTER_REGNUM)
    return current_frame_info.total_size;
  else
    gcc_unreachable ();
}

/* Generate a compare for CODE.  Return a brand-new rtx that represents
   the result of the compare.  */
static rtx
mt_generate_compare (enum rtx_code code, rtx op0, rtx op1)
{
  rtx scratch0, scratch1, const_scratch;

  switch (code)
    {
    case GTU:
    case LTU:
    case GEU:
    case LEU:
      /* Need to adjust ranges for faking unsigned compares.  */
      scratch0 = gen_reg_rtx (SImode);
      scratch1 = gen_reg_rtx (SImode);
      const_scratch = force_reg (SImode, GEN_INT (MT_MIN_INT));
      emit_insn (gen_addsi3 (scratch0, const_scratch, op0));
      emit_insn (gen_addsi3 (scratch1, const_scratch, op1));
      break;
    default:
      scratch0 = op0;
      scratch1 = op1;
      break;
    }

  /* Adjust the compare operator to fake unsigned compares.  */
  switch (code)
    {
    case GTU:
      code = GT; break;
    case LTU:
      code = LT; break;
    case GEU:
      code = GE; break;
    case LEU:
      code = LE; break;
    default:
      /* Do nothing.  */
      break;
    }

  /* Generate the actual compare.  */
  return gen_rtx_fmt_ee (code, VOIDmode, scratch0, scratch1);
}
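/* Worked example of the biasing above (an assumed illustration, not from
   the original source): for a GTU compare of 0xffffffff against 1, both
   operands are offset by MT_MIN_INT (assumed here to be 0x80000000).  The
   sums wrap to 0x7fffffff and 0x80000001, and the signed GT compare of
   those values is true, matching the unsigned result.  */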
/* Emit a branch of kind CODE to location LOC.  */
void
mt_emit_cbranch (enum rtx_code code, rtx loc, rtx op0, rtx op1)
{
  rtx condition_rtx, loc_ref;

  if (! reg_or_0_operand (op0, SImode))
    op0 = copy_to_mode_reg (SImode, op0);

  if (! reg_or_0_operand (op1, SImode))
    op1 = copy_to_mode_reg (SImode, op1);

  condition_rtx = mt_generate_compare (code, op0, op1);
  loc_ref = gen_rtx_LABEL_REF (VOIDmode, loc);
  emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
                               gen_rtx_IF_THEN_ELSE (VOIDmode, condition_rtx,
                                                     loc_ref, pc_rtx)));
}

/* Subfunction of the following function.  Update the flags of any MEM
   found in part of X.  */
static void
mt_set_memflags_1 (rtx x, int in_struct_p, int volatile_p)
{
  int i;

  switch (GET_CODE (x))
    {
    case SEQUENCE:
    case PARALLEL:
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        mt_set_memflags_1 (XVECEXP (x, 0, i), in_struct_p, volatile_p);
      break;

    case INSN:
      mt_set_memflags_1 (PATTERN (x), in_struct_p, volatile_p);
      break;

    case SET:
      mt_set_memflags_1 (SET_DEST (x), in_struct_p, volatile_p);
      mt_set_memflags_1 (SET_SRC (x), in_struct_p, volatile_p);
      break;

    case MEM:
      MEM_IN_STRUCT_P (x) = in_struct_p;
      MEM_VOLATILE_P (x) = volatile_p;
      /* Sadly, we cannot use alias sets because the extra aliasing
         produced by the AND interferes.  Given that two-byte quantities
         are the only thing we would be able to differentiate anyway,
         there does not seem to be any point in convoluting the early
         out of the alias check.  */
      /* set_mem_alias_set (x, alias_set); */
      break;

    default:
      break;
    }
}

/* Look for any MEMs in the current sequence of insns and set the
   in-struct, unchanging, and volatile flags from the flags in REF.
   If REF is not a MEM, don't do anything.  */
void
mt_set_memflags (rtx ref)
{
  rtx insn;
  int in_struct_p, volatile_p;

  if (GET_CODE (ref) != MEM)
    return;

  in_struct_p = MEM_IN_STRUCT_P (ref);
  volatile_p = MEM_VOLATILE_P (ref);

  /* This is only called from mt.md, after having had something
     generated from one of the insn patterns.  So if everything is
     zero, the pattern is already up-to-date.  */
  if (! in_struct_p && ! volatile_p)
    return;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    mt_set_memflags_1 (insn, in_struct_p, volatile_p);
}

/* Implement SECONDARY_RELOAD_CLASS.  */
enum reg_class
mt_secondary_reload_class (enum reg_class class ATTRIBUTE_UNUSED,
                           enum machine_mode mode,
                           rtx x)
{
  if ((mode == QImode && (! TARGET_BYTE_ACCESS)) || mode == HImode)
    {
      if (GET_CODE (x) == MEM
          || (GET_CODE (x) == REG && true_regnum (x) == -1)
          || (GET_CODE (x) == SUBREG
              && (GET_CODE (SUBREG_REG (x)) == MEM
                  || (GET_CODE (SUBREG_REG (x)) == REG
                      && true_regnum (SUBREG_REG (x)) == -1))))
        return GENERAL_REGS;
    }

  return NO_REGS;
}

/* Handle the FUNCTION_VALUE, FUNCTION_OUTGOING_VALUE, and LIBCALL_VALUE
   macros.  */
rtx
mt_function_value (tree valtype, enum machine_mode mode,
                   tree func_decl ATTRIBUTE_UNUSED)
{
  if (mode == DImode || mode == DFmode)
    return gen_rtx_MEM (mode, gen_rtx_REG (mode, RETURN_VALUE_REGNUM));

  if (valtype)
    mode = TYPE_MODE (valtype);

  return gen_rtx_REG (mode, RETURN_VALUE_REGNUM);
}

/* Split a move into two smaller pieces.  MODE indicates the reduced mode.
   OPERANDS[0] is the original destination and OPERANDS[1] is the original
   source.  The new destinations are OPERANDS[2] and OPERANDS[4], while the
   new sources are OPERANDS[3] and OPERANDS[5].  */
void
mt_split_words (enum machine_mode nmode,
                enum machine_mode omode,
                rtx *operands)
{
  rtx dl, dh;	/* src/dest pieces.  */