📄 mips.c
字号:
if (TARGET_MEMCPY) block_move_call (dest_reg, src_reg, bytes_rtx);#if 0 else if (constp && bytes <= 3*align) block_move_sequence (dest_reg, src_reg, bytes, align, orig_src);#endif else if (constp && bytes <= 2*MAX_MOVE_BYTES) emit_insn (gen_movstrsi_internal (gen_rtx (MEM, BLKmode, dest_reg), gen_rtx (MEM, BLKmode, src_reg), bytes_rtx, align_rtx)); else if (constp && align >= UNITS_PER_WORD && optimize) block_move_loop (dest_reg, src_reg, bytes, align, orig_src); else if (constp && optimize) { /* If the alignment is not word aligned, generate a test at runtime, to see whether things wound up aligned, and we can use the faster lw/sw instead ulw/usw. */ rtx temp = gen_reg_rtx (Pmode); rtx aligned_label = gen_label_rtx (); rtx join_label = gen_label_rtx (); int leftover = bytes % MAX_MOVE_BYTES; bytes -= leftover; emit_insn (gen_iorsi3 (temp, src_reg, dest_reg)); emit_insn (gen_andsi3 (temp, temp, GEN_INT (UNITS_PER_WORD-1))); emit_insn (gen_cmpsi (temp, const0_rtx)); emit_jump_insn (gen_beq (aligned_label)); /* Unaligned loop. */ block_move_loop (dest_reg, src_reg, bytes, 1, orig_src); emit_jump_insn (gen_jump (join_label)); emit_barrier (); /* Aligned loop. */ emit_label (aligned_label); block_move_loop (dest_reg, src_reg, bytes, UNITS_PER_WORD, orig_src); emit_label (join_label); /* Bytes at the end of the loop. */ if (leftover) {#if 0 if (leftover <= 3*align) block_move_sequence (dest_reg, src_reg, leftover, align, orig_src); else#endif emit_insn (gen_movstrsi_internal (gen_rtx (MEM, BLKmode, dest_reg), gen_rtx (MEM, BLKmode, src_reg), GEN_INT (leftover), GEN_INT (align))); } } else block_move_call (dest_reg, src_reg, bytes_rtx);}/* Emit load/stores for a small constant block_move. operands[0] is the memory address of the destination. operands[1] is the memory address of the source. operands[2] is the number of bytes to move. operands[3] is the alignment. operands[4] is a temp register. operands[5] is a temp register. ... 
   operands[3+num_regs] is the last temp register.

   The block move type can be one of the following:
	BLOCK_MOVE_NORMAL	Do all of the block move.
	BLOCK_MOVE_NOT_LAST	Do all but the last store.
	BLOCK_MOVE_LAST		Do just the last store.  */

/* Emit the assembler text for a small constant block move, loading into
   the scratch registers operands[4]..operands[3+num_regs] ahead of the
   stores so loads can pair with their delay slots.  All code is emitted
   through output_asm_insn; the returned template is therefore empty.  */

char *
output_block_move (insn, operands, num_regs, move_type)
     rtx insn;				/* insn being output, for abort diagnostics */
     rtx operands[];			/* see operand layout in comment above */
     int num_regs;			/* number of scratch registers supplied */
     enum block_move_type move_type;	/* NORMAL / NOT_LAST / LAST, see above */
{
  rtx dest_reg = XEXP (operands[0], 0);
  rtx src_reg = XEXP (operands[1], 0);
  int bytes = INTVAL (operands[2]);
  int align = INTVAL (operands[3]);
  int num = 0;				/* chunks queued but not yet emitted */
  int offset = 0;			/* byte offset of the next chunk */
  int use_lwl_lwr = FALSE;		/* any queued chunk used unaligned lwl/lwr? */
  int last_operand = num_regs+4;
  int i;
  rtx xoperands[10];

  /* Per-chunk description: the load/store templates to emit and the
     offset/mode of the chunk.  */
  struct {
    char *load;				/* load insn without nop */
    char *load_nop;			/* load insn with trailing nop */
    char *store;			/* store insn */
    char *final;			/* if last_store used: NULL or swr */
    char *last_store;			/* last store instruction */
    int offset;				/* current offset */
    enum machine_mode mode;		/* mode to use on (MEM) */
  } load_store[4];

  /* Detect a bug in GCC, where it can give us a register
     the same as one of the addressing registers.  */
  for (i = 4; i < last_operand; i++)
    {
      if (reg_mentioned_p (operands[i], operands[0])
	  || reg_mentioned_p (operands[i], operands[1]))
	{
	  abort_with_insn (insn, "register passed as address and temp register to block move");
	}
    }

  /* If we are given global or static addresses, and we would be
     emitting a few instructions, try to save time by using a
     temporary register for the pointer.  Note that this consumes the
     highest-numbered scratch register (num_regs is decremented).  */
  if (bytes > 2*align || move_type != BLOCK_MOVE_NORMAL)
    {
      if (CONSTANT_P (src_reg))
	{
	  if (TARGET_STATS)
	    mips_count_memory_refs (operands[1], 1);

	  src_reg = operands[ 3 + num_regs-- ];
	  if (move_type != BLOCK_MOVE_LAST)
	    {
	      xoperands[1] = operands[1];
	      xoperands[0] = src_reg;
	      output_asm_insn ("la\t%0,%1", xoperands);
	    }
	}

      if (CONSTANT_P (dest_reg))
	{
	  if (TARGET_STATS)
	    mips_count_memory_refs (operands[0], 1);

	  dest_reg = operands[ 3 + num_regs-- ];
	  if (move_type != BLOCK_MOVE_LAST)
	    {
	      xoperands[1] = operands[0];
	      xoperands[0] = dest_reg;
	      output_asm_insn ("la\t%0,%1", xoperands);
	    }
	}
    }

  /* Clamp to the size of the load_store queue; at least one scratch
     register is required.  */
  if (num_regs > (sizeof (load_store) / sizeof (load_store[0])))
    num_regs = (sizeof (load_store) / sizeof (load_store[0]));

  else if (num_regs < 1)
    abort ();

  /* set_noreorder counts nesting, so ".set noreorder" is only emitted
     when entering the outermost noreorder region.  */
  if (TARGET_GAS && move_type != BLOCK_MOVE_LAST && set_noreorder++ == 0)
    output_asm_insn (".set\tnoreorder", operands);

  while (bytes > 0)
    {
      load_store[num].offset = offset;

      /* Pick the widest chunk the remaining bytes and the alignment
	 allow: aligned word, unaligned word (lwl/lwr), halfword, byte.  */
      if (bytes >= UNITS_PER_WORD && align >= UNITS_PER_WORD)
	{
	  /* %# in the template requests an optional nop for the load
	     delay slot -- NOTE(review): presumed from the MIPS
	     print_operand punctuation, which is outside this excerpt;
	     confirm.  */
	  load_store[num].load = "lw\t%0,%1";
	  load_store[num].load_nop = "lw\t%0,%1%#";
	  load_store[num].store = "sw\t%0,%1";
	  load_store[num].last_store = "sw\t%0,%1";
	  load_store[num].final = (char *)0;
	  load_store[num].mode = SImode;
	  offset += UNITS_PER_WORD;
	  bytes -= UNITS_PER_WORD;
	}

      else if (bytes >= UNITS_PER_WORD)
	{
	  /* A full word remains but it is under-aligned: use the
	     unaligned load/store pairs.  Operand %1 is the low MEM,
	     %2 the MEM at offset UNITS_PER_WORD-1; which of lwl/lwr
	     takes which depends on endianness.  */
#if BYTES_BIG_ENDIAN
	  load_store[num].load = "lwl\t%0,%1\n\tlwr\t%0,%2";
	  load_store[num].load_nop = "lwl\t%0,%1\n\tlwr\t%0,%2%#";
	  load_store[num].store = "swl\t%0,%1\n\tswr\t%0,%2";
	  load_store[num].last_store = "swr\t%0,%2";
	  load_store[num].final = "swl\t%0,%1";
#else
	  load_store[num].load = "lwl\t%0,%2\n\tlwr\t%0,%1";
	  load_store[num].load_nop = "lwl\t%0,%2\n\tlwr\t%0,%1%#";
	  load_store[num].store = "swl\t%0,%2\n\tswr\t%0,%1";
	  load_store[num].last_store = "swr\t%0,%1";
	  load_store[num].final = "swl\t%0,%2";
#endif
	  load_store[num].mode = SImode;
	  offset += UNITS_PER_WORD;
	  bytes -= UNITS_PER_WORD;
	  use_lwl_lwr = TRUE;
	}

      else if (bytes >= UNITS_PER_SHORT && align >= UNITS_PER_SHORT)
	{
	  load_store[num].load = "lh\t%0,%1";
	  load_store[num].load_nop = "lh\t%0,%1%#";
	  load_store[num].store = "sh\t%0,%1";
	  load_store[num].last_store = "sh\t%0,%1";
	  load_store[num].final = (char *)0;
	  /* NOTE(review): redundant -- .offset was already set at the
	     top of the loop; harmless.  */
	  load_store[num].offset = offset;
	  load_store[num].mode = HImode;
	  offset += UNITS_PER_SHORT;
	  bytes -= UNITS_PER_SHORT;
	}

      else
	{
	  load_store[num].load = "lb\t%0,%1";
	  load_store[num].load_nop = "lb\t%0,%1%#";
	  load_store[num].store = "sb\t%0,%1";
	  load_store[num].last_store = "sb\t%0,%1";
	  load_store[num].final = (char *)0;
	  load_store[num].mode = QImode;
	  offset++;
	  bytes--;
	}

      if (TARGET_STATS && move_type != BLOCK_MOVE_LAST)
	{
	  dslots_load_total++;
	  dslots_load_filled++;

	  if (CONSTANT_P (src_reg))
	    mips_count_memory_refs (src_reg, 1);

	  if (CONSTANT_P (dest_reg))
	    mips_count_memory_refs (dest_reg, 1);
	}

      /* Emit load/stores now if we have run out of registers or are
	 at the end of the move.  */
      if (++num == num_regs || bytes == 0)
	{
	  /* If only load/store, we need a NOP after the load.  */
	  if (num == 1)
	    {
	      load_store[0].load = load_store[0].load_nop;
	      if (TARGET_STATS && move_type != BLOCK_MOVE_LAST)
		dslots_load_filled--;
	    }

	  /* First pass: emit all queued loads (skipped entirely for
	     BLOCK_MOVE_LAST, which only re-emits the final store).  */
	  if (move_type != BLOCK_MOVE_LAST)
	    {
	      for (i = 0; i < num; i++)
		{
		  int offset;

		  if (!operands[i+4])
		    abort ();

		  /* Re-mode the scratch register to match the chunk.  */
		  if (GET_MODE (operands[i+4]) != load_store[i].mode)
		    operands[i+4] = gen_rtx (REG, load_store[i].mode, REGNO (operands[i+4]));

		  offset = load_store[i].offset;
		  xoperands[0] = operands[i+4];
		  xoperands[1] = gen_rtx (MEM, load_store[i].mode,
					  plus_constant (src_reg, offset));

		  if (use_lwl_lwr)
		    xoperands[2] = gen_rtx (MEM, load_store[i].mode,
					    plus_constant (src_reg, UNITS_PER_WORD-1+offset));

		  output_asm_insn (load_store[i].load, xoperands);
		}
	    }

	  /* Second pass: emit the stores.  NOT_LAST holds back the very
	     last store (emitting only .final, e.g. the swl half) and
	     LAST emits only that remaining store -- NOTE(review):
	     presumably so the last store can be placed in a branch
	     delay slot by the movstr expanders; confirm against the
	     .md patterns.  */
	  for (i = 0; i < num; i++)
	    {
	      int last_p = (i == num-1 && bytes == 0);
	      int offset = load_store[i].offset;

	      xoperands[0] = operands[i+4];
	      xoperands[1] = gen_rtx (MEM, load_store[i].mode,
				      plus_constant (dest_reg, offset));

	      if (use_lwl_lwr)
		xoperands[2] = gen_rtx (MEM, load_store[i].mode,
					plus_constant (dest_reg, UNITS_PER_WORD-1+offset));

	      if (move_type == BLOCK_MOVE_NORMAL)
		output_asm_insn (load_store[i].store, xoperands);

	      else if (move_type == BLOCK_MOVE_NOT_LAST)
		{
		  if (!last_p)
		    output_asm_insn (load_store[i].store, xoperands);

		  else if (load_store[i].final != (char *)0)
		    output_asm_insn (load_store[i].final, xoperands);
		}

	      else if (last_p)
		output_asm_insn (load_store[i].last_store, xoperands);
	    }

	  num = 0;			/* reset load_store */
	  use_lwl_lwr = FALSE;		/* reset whether or not we used lwl/lwr */
	}
    }

  /* Leave the noreorder region if we opened it above.  */
  if (TARGET_GAS && move_type != BLOCK_MOVE_LAST && --set_noreorder == 0)
    output_asm_insn (".set\treorder", operands);

  return "";
}

/* Argument support functions.  */

/* Initialize CUMULATIVE_ARGS for a function.  Scans the declared
   parameter list to detect varargs functions (see comment below);
   libname is accepted for interface compatibility but unused here.  */

void
init_cumulative_args (cum, fntype, libname)
     CUMULATIVE_ARGS *cum;	/* argument info to initialize */
     tree fntype;		/* tree ptr for function decl */
     rtx libname;		/* SYMBOL_REF of library name or 0 */
{
  static CUMULATIVE_ARGS zero_cum;	/* all-zero template for *cum */
  tree param, next_param;

  if (TARGET_DEBUG_E_MODE)
    {
      fprintf (stderr, "\ninit_cumulative_args, fntype = 0x%.8lx", (long)fntype);

      if (!fntype)
	fputc ('\n', stderr);

      else
	{
	  tree ret_type = TREE_TYPE (fntype);
	  fprintf (stderr, ", fntype code = %s, ret code = %s\n",
		   tree_code_name[ (int)TREE_CODE (fntype) ],
		   tree_code_name[ (int)TREE_CODE (ret_type) ]);
	}
    }

  *cum = zero_cum;

  /* Determine if this function has variable arguments.  This is
     indicated by the last argument being 'void_type_node' if there
     are no variable arguments.  The standard MIPS calling sequence
     passes all arguments in the general purpose registers in this
     case.  */

  for (param = (fntype) ? TYPE_ARG_TYPES (fntype) : 0;
       param != (tree)0;
       param = next_param)
    {
      next_param = TREE_CHAIN (param);
      /* Last listed type is not void_type_node => varargs; force
	 general-purpose register passing.  */
      if (next_param == (tree)0 && TREE_VALUE (param) != void_type_node)
	cum->gp_reg_found = 1;
    }
}

/* Advance the argument to the next argument position.
   */

/* Bump the cumulative-argument state past one argument of the given
   mode: counts the argument and the number of argument words it
   occupies, and records whether a general-purpose register became
   involved.  NOTE(review): the 0x%.8x format prints the tree pointer
   'type' through an int conversion -- suspect on 64-bit hosts, but
   this is debug-only output.  */

void
function_arg_advance (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* whether or not the argument was named */
{
  if (TARGET_DEBUG_E_MODE)
    fprintf (stderr,
	     "function_adv( {gp reg found = %d, arg # = %2d, words = %2d}, %4s, 0x%.8x, %d )\n\n",
	     cum->gp_reg_found, cum->arg_number, cum->arg_words, GET_MODE_NAME (mode),
	     type, named);

  cum->arg_number++;
  switch (mode)
    {
    default:
      error ("Illegal mode given to function_arg_advance");
      break;

    case VOIDmode:
      break;

    case BLKmode:
      /* Aggregates: round their size up to whole 4-byte words.  */
      cum->gp_reg_found = 1;
      cum->arg_words += (int_size_in_bytes (type) + 3) / 4;
      break;

    case SFmode:
      cum->arg_words++;
      break;

    case DFmode:
      cum->arg_words += 2;
      break;

    case DImode:
      cum->gp_reg_found = 1;
      cum->arg_words += 2;
      break;

    case QImode:
    case HImode:
    case SImode:
      cum->gp_reg_found = 1;
      cum->arg_words++;
      break;
    }
}

/* Return a RTL expression containing the register for the given mode,
   or 0 if the argument is to be passed on the stack.  */

struct rtx_def *
function_arg (cum, mode, type, named)
     CUMULATIVE_ARGS *cum;	/* current arg information */
     enum machine_mode mode;	/* current arg mode */
     tree type;			/* type of the argument or 0 if lib support */
     int named;			/* != 0 for normal args, == 0 for ... args */
{
  rtx ret;
  int regbase = -1;		/* first register of the bank to allocate from */
  int bias = 0;			/* correction added to the register number */
  int struct_p = ((type != (tree)0)
		  && (TREE_CODE (type) == RECORD_TYPE
		      || TREE_CODE (type) == UNION_TYPE));

  if (TARGET_DEBUG_E_MODE)
    fprintf (stderr,
	     "function_arg( {gp reg found = %d, arg # = %2d, words = %2d}, %4s, 0x%.8x, %d ) = ",
	     cum->gp_reg_found, cum->arg_number, cum->arg_words, GET_MODE_NAME (mode),
	     type, named);

  /* Choose the register bank (GP vs FP) and apply any doubleword
     alignment padding to the word counter.  */
  switch (mode)
    {
    default:
      error ("Illegal mode given to function_arg");
      break;

    case SFmode:
      /* Floats go in FP registers only while the leading arguments are
	 all floating point (and not soft-float).  */
      if (cum->gp_reg_found || cum->arg_number >= 2)
	regbase = GP_ARG_FIRST;
      else
	{
	  regbase = (TARGET_SOFT_FLOAT) ? GP_ARG_FIRST : FP_ARG_FIRST;
	  if (cum->arg_words == 1)	/* first arg was float */
	    bias = 1;			/* use correct reg */
	}
      break;

    case DFmode:
      cum->arg_words += (cum->arg_words & 1);	/* pad to even word */
      regbase = (cum->gp_reg_found || TARGET_SOFT_FLOAT) ? GP_ARG_FIRST : FP_ARG_FIRST;
      break;

    case BLKmode:
      if (type != (tree)0 && TYPE_ALIGN (type) > BITS_PER_WORD)
	cum->arg_words += (cum->arg_words & 1);
      regbase = GP_ARG_FIRST;
      break;

    case VOIDmode:
    case QImode:
    case HImode:
    case SImode:
      regbase = GP_ARG_FIRST;
      break;

    case DImode:
      cum->arg_words += (cum->arg_words & 1);	/* pad to even word */
      regbase = GP_ARG_FIRST;
    }

  if (cum->arg_words >= MAX_ARGS_IN_REGISTERS)
    {
      /* Out of argument registers: caller passes this one on the stack.  */
      if (TARGET_DEBUG_E_MODE)
	fprintf (stderr, "<stack>%s\n", struct_p ? ", [struct]" : "");

      ret = (rtx)0;
    }
  else
    {
      if (regbase == -1)
	abort ();

      ret = gen_rtx (REG, mode, regbase + cum->arg_words + bias);

      if (TARGET_DEBUG_E_MODE)
	fprintf (stderr, "%s%s\n", reg_names[regbase + cum->arg_words + bias],
		 struct_p ? ", [struct]" : "");

      /* The following is a hack in order to pass 1 byte structures
	 the same way that the MIPS compiler does (namely by passing
	 the structure in the high byte or half word of the register).
	 This also makes varargs work.  If we have such a structure,
	 we save the adjustment RTL, and the call define expands will
	 emit them.  For the VOIDmode argument (argument after the
	 last real argument), pass back a parallel vector holding each
	 of the adjustments.  */

      if (struct_p && (mode == QImode || mode == HImode))
	{
	  /* Record a left shift that moves the small struct into the
	     high end of the register.  */
	  rtx amount = GEN_INT (BITS_PER_WORD - GET_MODE_BITSIZE (mode));
	  rtx reg = gen_rtx (REG, SImode, regbase + cum->arg_words + bias);
	  cum->adjust[ cum->num_adjusts++ ] = gen_ashlsi3 (reg, reg, amount);
	}
    }
  /* NOTE(review): SOURCE is truncated here -- the remainder of
     function_arg (presumably ending with "return ret;") lies beyond
     this excerpt.  */
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -