📄 calls.c
      push_temp_slots ();
      funexp = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      pop_temp_slots ();	/* FUNEXP can't be BLKmode */
      emit_queue ();
    }

  /* Figure out the register where the value, if any, will come back.  */
  valreg = 0;
  if (TYPE_MODE (TREE_TYPE (exp)) != VOIDmode
      && ! structure_value_addr)
    {
      if (pcc_struct_value)
        valreg = hard_function_value (build_pointer_type (TREE_TYPE (exp)),
                                      fndecl);
      else
        valreg = hard_function_value (TREE_TYPE (exp), fndecl);
    }

  /* Precompute all register parameters.  It isn't safe to compute anything
     once we have started filling any specific hard regs.  */
  reg_parm_seen = 0;
  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack)
      {
        reg_parm_seen = 1;

        if (args[i].value == 0)
          {
            push_temp_slots ();
            args[i].value = expand_expr (args[i].tree_value, NULL_RTX,
                                         VOIDmode, 0);
            preserve_temp_slots (args[i].value);
            pop_temp_slots ();

            /* ANSI doesn't require a sequence point here,
               but PCC has one, so this will avoid some problems.  */
            emit_queue ();
          }

        /* If we are to promote the function arg to a wider mode,
           do it now.  */

        if (args[i].mode != TYPE_MODE (TREE_TYPE (args[i].tree_value)))
          args[i].value
            = convert_modes (args[i].mode,
                             TYPE_MODE (TREE_TYPE (args[i].tree_value)),
                             args[i].value, args[i].unsignedp);

        /* If the value is expensive, and we are inside an appropriately
           short loop, put the value into a pseudo and then put the pseudo
           into the hard reg.

           For small register classes, also do this if this call uses
           register parameters.  This is to avoid reload conflicts while
           loading the parameter registers.  */

        if ((! (GET_CODE (args[i].value) == REG
                || (GET_CODE (args[i].value) == SUBREG
                    && GET_CODE (SUBREG_REG (args[i].value)) == REG)))
            && args[i].mode != BLKmode
            && rtx_cost (args[i].value, SET) > 2
#ifdef SMALL_REGISTER_CLASSES
            && (reg_parm_seen || preserve_subexpressions_p ())
#else
            && preserve_subexpressions_p ()
#endif
            )
          args[i].value = copy_to_mode_reg (args[i].mode, args[i].value);
      }

#if defined(ACCUMULATE_OUTGOING_ARGS) && defined(REG_PARM_STACK_SPACE)
  /* The argument list is the property of the called routine and it
     may clobber it.  If the fixed area has been used for previous
     parameters, we must save and restore it.

     Here we compute the boundary of the area that needs to be saved,
     if any.  */

#ifdef ARGS_GROW_DOWNWARD
  for (i = 0; i < reg_parm_stack_space + 1; i++)
#else
  for (i = 0; i < reg_parm_stack_space; i++)
#endif
    {
      if (i >= highest_outgoing_arg_in_use
          || stack_usage_map[i] == 0)
        continue;

      if (low_to_save == -1)
        low_to_save = i;

      high_to_save = i;
    }

  if (low_to_save >= 0)
    {
      int num_to_save = high_to_save - low_to_save + 1;
      enum machine_mode save_mode
        = mode_for_size (num_to_save * BITS_PER_UNIT, MODE_INT, 1);
      rtx stack_area;

      /* If we don't have the required alignment, must do this in BLKmode.  */
      if ((low_to_save
           & (MIN (GET_MODE_SIZE (save_mode),
                   BIGGEST_ALIGNMENT / UNITS_PER_WORD) - 1)))
        save_mode = BLKmode;

      stack_area = gen_rtx (MEM, save_mode,
                            memory_address (save_mode,
#ifdef ARGS_GROW_DOWNWARD
                                            plus_constant (argblock,
                                                           - high_to_save)
#else
                                            plus_constant (argblock,
                                                           low_to_save)
#endif
                                            ));
      if (save_mode == BLKmode)
        {
          save_area = assign_stack_temp (BLKmode, num_to_save, 0);
          MEM_IN_STRUCT_P (save_area) = 0;
          emit_block_move (validize_mem (save_area), stack_area,
                           GEN_INT (num_to_save),
                           PARM_BOUNDARY / BITS_PER_UNIT);
        }
      else
        {
          save_area = gen_reg_rtx (save_mode);
          emit_move_insn (save_area, stack_area);
        }
    }
#endif
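  /* Worked example for the save bounds computed above (only relevant when
     REG_PARM_STACK_SPACE is defined; the numbers are illustrative, not
     taken from any particular port): with a fixed area of 16 bytes in
     which only bytes 4 through 11 are marked in stack_usage_map, the loop
     leaves low_to_save == 4 and high_to_save == 11, so num_to_save == 8.
     Those eight bytes are then saved in a single integer-mode pseudo if
     the alignment test passes, and through a BLKmode stack temporary
     otherwise.  */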
  /* Now store (and compute if necessary) all non-register parms.
     These come before register parms, since they can require block-moves,
     which could clobber the registers used for register parms.
     Parms which have partial registers are not stored here,
     but we do preallocate space here if they want that.  */

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg == 0 || args[i].pass_on_stack)
      store_one_arg (&args[i], argblock, may_be_alloca,
                     args_size.var != 0, fndecl, reg_parm_stack_space);

#ifdef STRICT_ALIGNMENT
  /* If we have a parm that is passed in registers but not in memory
     and whose alignment does not permit a direct copy into registers,
     make a group of pseudos that correspond to each register that we
     will later fill.  */

  for (i = 0; i < num_actuals; i++)
    if (args[i].reg != 0 && ! args[i].pass_on_stack
        && args[i].mode == BLKmode
        && (TYPE_ALIGN (TREE_TYPE (args[i].tree_value))
            < MIN (BIGGEST_ALIGNMENT, BITS_PER_WORD)))
      {
        int bytes = int_size_in_bytes (TREE_TYPE (args[i].tree_value));
        int big_endian_correction = 0;

        args[i].n_aligned_regs
          = args[i].partial ? args[i].partial
            : (bytes + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;

        args[i].aligned_regs
          = (rtx *) alloca (sizeof (rtx) * args[i].n_aligned_regs);

        /* Structures smaller than a word are aligned to the least
           significant byte (to the right).  On a BYTES_BIG_ENDIAN machine,
           this means we must skip the empty high order bytes when
           calculating the bit offset.  */
        if (BYTES_BIG_ENDIAN && bytes < UNITS_PER_WORD)
          big_endian_correction = (BITS_PER_WORD - (bytes * BITS_PER_UNIT));

        for (j = 0; j < args[i].n_aligned_regs; j++)
          {
            rtx reg = gen_reg_rtx (word_mode);
            rtx word = operand_subword_force (args[i].value, j, BLKmode);
            int bitsize = TYPE_ALIGN (TREE_TYPE (args[i].tree_value));
            int bitpos;

            args[i].aligned_regs[j] = reg;

            /* Clobber REG and move each partword into it.  Ensure we don't
               go past the end of the structure.  Note that the loop below
               works because we've already verified that padding
               and endianness are compatible.  */

            emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));

            for (bitpos = 0;
                 bitpos < BITS_PER_WORD && bytes > 0;
                 bitpos += bitsize, bytes -= bitsize / BITS_PER_UNIT)
              {
                int xbitpos = bitpos + big_endian_correction;

                store_bit_field (reg, bitsize, xbitpos, word_mode,
                                 extract_bit_field (word, bitsize, bitpos, 1,
                                                    NULL_RTX, word_mode,
                                                    word_mode,
                                                    bitsize / BITS_PER_UNIT,
                                                    BITS_PER_WORD),
                                 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
              }
          }
      }
#endif
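  /* Worked example for the big-endian correction above, with illustrative
     values not tied to any particular port: for a 3-byte structure on a
     big-endian target with 32-bit words, bytes * BITS_PER_UNIT is 24, so
     big_endian_correction is 32 - 24 = 8.  Each partword is therefore
     stored 8 bits further into the pseudo than the offset it was extracted
     from, skipping the one empty high-order byte.  */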
  /* Now store any partially-in-registers parm.
     This is the last place a block-move can happen.  */

  if (reg_parm_seen)
    for (i = 0; i < num_actuals; i++)
      if (args[i].partial != 0 && ! args[i].pass_on_stack)
        store_one_arg (&args[i], argblock, may_be_alloca,
                       args_size.var != 0, fndecl, reg_parm_stack_space);

#ifndef PUSH_ARGS_REVERSED
#ifdef STACK_BOUNDARY
  /* If we pushed args in forward order, perform stack alignment
     after pushing the last arg.  */
  if (argblock == 0)
    anti_adjust_stack (GEN_INT (args_size.constant
                                - original_args_size.constant));
#endif
#endif

  /* If register arguments require space on the stack and stack space
     was not preallocated, allocate stack space here for arguments
     passed in registers.  */
#if ! defined(ACCUMULATE_OUTGOING_ARGS) && defined(OUTGOING_REG_PARM_STACK_SPACE)
  if (must_preallocate == 0 && reg_parm_stack_space > 0)
    anti_adjust_stack (GEN_INT (reg_parm_stack_space));
#endif

  /* Pass the function the address in which to return a structure value.  */
  if (structure_value_addr && ! structure_value_addr_parm)
    {
      emit_move_insn (struct_value_rtx,
                      force_reg (Pmode,
                                 force_operand (structure_value_addr,
                                                NULL_RTX)));
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
    }

  funexp = prepare_call_address (funexp, fndecl, &call_fusage, reg_parm_seen);

  /* Now do the register loads required for any wholly-register parms or any
     parms which are passed both on the stack and in a register.  Their
     expressions were already evaluated.

     Mark all register-parms as living through the call, putting these USE
     insns in the CALL_INSN_FUNCTION_USAGE field.  */

  for (i = 0; i < num_actuals; i++)
    {
      rtx list = args[i].reg;
      int partial = args[i].partial;

      while (list)
        {
          rtx reg;
          int nregs;

          /* Process each register that needs to get this arg.  */
          if (GET_CODE (list) == EXPR_LIST)
            reg = XEXP (list, 0), list = XEXP (list, 1);
          else
            reg = list, list = 0;

          /* Set to non-negative if must move a word at a time, even if just
             one word (e.g., partial == 1 && mode == DFmode).  Set to -1 if
             we just use a normal move insn.  This value can be zero if the
             argument is a zero size structure with no fields.  */
          nregs = (partial ? partial
                   : (TYPE_MODE (TREE_TYPE (args[i].tree_value)) == BLKmode
                      ? ((int_size_in_bytes (TREE_TYPE (args[i].tree_value))
                          + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                      : -1));

          /* If simple case, just do move.  If normal partial, store_one_arg
             has already loaded the register for us.  In all other cases,
             load the register(s) from memory.  */

          if (nregs == -1)
            emit_move_insn (reg, args[i].value);

#ifdef STRICT_ALIGNMENT
          /* If we have pre-computed the values to put in the registers in
             the case of non-aligned structures, copy them in now.  */

          else if (args[i].n_aligned_regs != 0)
            for (j = 0; j < args[i].n_aligned_regs; j++)
              emit_move_insn (gen_rtx (REG, word_mode, REGNO (reg) + j),
                              args[i].aligned_regs[j]);
#endif

          else if (args[i].partial == 0 || args[i].pass_on_stack)
            move_block_to_reg (REGNO (reg),
                               validize_mem (args[i].value),
                               nregs, args[i].mode);

          if (nregs == -1)
            use_reg (&call_fusage, reg);
          else
            use_regs (&call_fusage, REGNO (reg), nregs == 0 ? 1 : nregs);

          /* PARTIAL referred only to the first register, so clear it for
             the next time.  */
          partial = 0;
        }
    }
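  /* Worked example for the NREGS computation above, with illustrative
     values: a 10-byte BLKmode structure passed entirely in registers on a
     target where UNITS_PER_WORD is 4 gives nregs == (10 + 3) / 4 == 3, so
     move_block_to_reg copies three words and use_regs marks three
     consecutive hard registers as used by the call.  A non-BLKmode
     argument with partial == 0 instead takes the nregs == -1 path and is
     moved with a single emit_move_insn.  */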
  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* All arguments and registers used for the call must be set up by now!  */

  /* Generate the actual call instruction.  */
  emit_call_1 (funexp, fndecl, funtype, args_size.constant,
               struct_value_size,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               valreg, old_inhibit_defer_pop, call_fusage, is_const);

  /* If call is cse'able, make appropriate pair of reg-notes around it.
     Test valreg so we don't crash; may safely ignore `const'
     if return type is void.  */
  if (is_const && valreg != 0)
    {
      rtx note = 0;
      rtx temp = gen_reg_rtx (GET_MODE (valreg));
      rtx insns;

      /* Construct an "equal form" for the value
         which mentions all the arguments in order
         as well as the function name.  */
#ifdef PUSH_ARGS_REVERSED
      for (i = 0; i < num_actuals; i++)
        note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
#else
      for (i = num_actuals - 1; i >= 0; i--)
        note = gen_rtx (EXPR_LIST, VOIDmode, args[i].initial_value, note);
#endif
      note = gen_rtx (EXPR_LIST, VOIDmode, funexp, note);

      insns = get_insns ();
      end_sequence ();

      emit_libcall_block (insns, temp, valreg, note);

      valreg = temp;
    }
  else if (is_const)
    {
      /* Otherwise, just write out the sequence without a note.  */
      rtx insns = get_insns ();

      end_sequence ();
      emit_insns (insns);
    }

  /* For calls to `setjmp', etc., inform flow.c it should complain
     if nonvolatile values are live.  */

  if (returns_twice)
    {
      emit_note (name, NOTE_INSN_SETJMP);
      current_function_calls_setjmp = 1;
    }

  if (is_longjmp)
    current_function_calls_longjmp = 1;

  /* Notice functions that cannot return.
     If optimizing, insns emitted below will be dead.
     If not optimizing, they will exist, which is useful
     if the user uses the `return' command in the debugger.  */

  if (is_volatile || is_longjmp)
    emit_barrier ();

  /* If value type not void, return an rtx for the value.  */

  /* If there are cleanups to be called, don't use a hard reg as target.  */
  if (cleanups_this_call != old_cleanups
      && target && REG_P (target)
      && REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = 0;

  if (TYPE_MODE (TREE_TYPE (exp)) == VOIDmode || ignore)
    {
      target = const0_rtx;
    }
  else if (structure_value_addr)
    {
      if (target == 0 || GET_CODE (target) != MEM)
        {
          target = gen_rtx (MEM, TYPE_MODE (TREE_TYPE (exp)),
                            memory_add