expr.c
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 0, VOIDmode, 3,
                             temp, Pmode, XEXP (xinner, 0), Pmode,
                             size, Pmode);
#else
          emit_library_call (bcopy_libfunc, 0, VOIDmode, 3,
                             XEXP (xinner, 0), Pmode, temp, Pmode,
                             size, Pmode);
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words
         that we don't need to allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr = memory_address (mode,
                                 plus_constant (args_addr,
                                                INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
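/* The tail of emit_push_insn above covers three cases: a block-mode
   (BLKmode) argument copied onto the stack with a memcpy/bcopy library
   call, a scalar passed partly in registers and pushed word by word,
   and any other scalar stored through a push operand or a computed
   stack address.  A representative caller is emit_library_call, defined
   next, which pushes each stack-passed libcall argument roughly as

     emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
                     argblock, GEN_INT (argvec[count].offset.constant));

   and which is itself invoked with alternating value/mode pairs, as in
   the memcpy_libfunc call above.  */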
/* Output a library call to function FUN (a SYMBOL_REF rtx)
   (emitting the queue unless NO_QUEUE is nonzero),
   for a value of mode OUTMODE,
   with NARGS different arguments, passed as alternating rtx values
   and machine_modes to convert them to.
   The rtx values should have been passed through protect_from_queue
   already.

   NO_QUEUE will be true if and only if the library call is a `const' call
   which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
   to the variable is_const in expand_call.

   NO_QUEUE must be true for const calls, because if it isn't, then
   any pending increment will be emitted between REG_LIBCALL/REG_RETVAL
   notes, and will be lost if the libcall sequence is optimized away.

   NO_QUEUE must be false for non-const calls, because if it isn't, the
   call insn will have its CONST_CALL_P bit set, and it will be incorrectly
   optimized.  For instance, the instruction scheduler may incorrectly
   move memory references across the non-const call.  */

void
emit_library_call (va_alist)
     va_dcl
{
  va_list p;
  struct args_size args_size;
  register int argnum;
  enum machine_mode outmode;
  int nargs;
  rtx fun;
  rtx orgfun;
  int inc;
  int count;
  rtx argblock = 0;
  CUMULATIVE_ARGS args_so_far;
  struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
               struct args_size offset; struct args_size size; };
  struct arg *argvec;
  int old_inhibit_defer_pop = inhibit_defer_pop;
  int no_queue = 0;
  rtx use_insns;

  va_start (p);
  orgfun = fun = va_arg (p, rtx);
  no_queue = va_arg (p, int);
  outmode = va_arg (p, enum machine_mode);
  nargs = va_arg (p, int);

  /* Copy all the libcall-arguments out of the varargs data
     and into a vector ARGVEC.

     Compute how to pass each argument.  We only support a very small subset
     of the full argument passing conventions to limit complexity here since
     library functions shouldn't have many args.  */

  argvec = (struct arg *) alloca (nargs * sizeof (struct arg));

  INIT_CUMULATIVE_ARGS (args_so_far, (tree)0, fun);

  args_size.constant = 0;
  args_size.var = 0;

  for (count = 0; count < nargs; count++)
    {
      rtx val = va_arg (p, rtx);
      enum machine_mode mode = va_arg (p, enum machine_mode);

      /* We cannot convert the arg value to the mode the library wants here;
         must do it earlier where we know the signedness of the arg.  */

      if (mode == BLKmode
          || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
        abort ();

      /* On some machines, there's no way to pass a float to a library fcn.
         Pass it as a double instead.  */
#ifdef LIBGCC_NEEDS_DOUBLE
      if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
        val = convert_to_mode (DFmode, val, 0), mode = DFmode;
#endif

      /* There's no need to call protect_from_queue, because
         either emit_move_insn or emit_push_insn will do that.  */

      /* Make sure it is a reasonable operand for a move or push insn.  */
      if (GET_CODE (val) != REG && GET_CODE (val) != MEM
          && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
        val = force_operand (val, NULL_RTX);

      argvec[count].value = val;
      argvec[count].mode = mode;

#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
      if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
        abort ();
#endif

      argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
      if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
        abort ();
#ifdef FUNCTION_ARG_PARTIAL_NREGS
      argvec[count].partial
        = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
#else
      argvec[count].partial = 0;
#endif

      locate_and_pad_parm (mode, NULL_TREE,
                           argvec[count].reg && argvec[count].partial == 0,
                           NULL_TREE, &args_size, &argvec[count].offset,
                           &argvec[count].size);

      if (argvec[count].size.var)
        abort ();

#ifndef REG_PARM_STACK_SPACE
      if (argvec[count].partial)
        argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
#endif

      if (argvec[count].reg == 0 || argvec[count].partial != 0
#ifdef REG_PARM_STACK_SPACE
          || 1
#endif
          )
        args_size.constant += argvec[count].size.constant;

#ifdef ACCUMULATE_OUTGOING_ARGS
      /* If this arg is actually passed on the stack, it might be
         clobbering something we already put there (this library call might
         be inside the evaluation of an argument to a function whose call
         requires the stack).  This will only occur when the library call
         has sufficient args to run out of argument registers.  Abort in
         this case; if this ever occurs, code must be added to save and
         restore the arg slot.  */

      if (argvec[count].reg == 0 || argvec[count].partial != 0)
        abort ();
#endif

      FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
    }
  va_end (p);

  /* If this machine requires an external definition for library
     functions, write one out.  */
  assemble_external_libcall (fun);

#ifdef STACK_BOUNDARY
  args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
                         / STACK_BYTES) * STACK_BYTES);
#endif

#ifdef REG_PARM_STACK_SPACE
  args_size.constant = MAX (args_size.constant,
                            REG_PARM_STACK_SPACE ((tree) 0));
#endif

#ifdef ACCUMULATE_OUTGOING_ARGS
  if (args_size.constant > current_function_outgoing_args_size)
    current_function_outgoing_args_size = args_size.constant;
  args_size.constant = 0;
#endif

#ifndef PUSH_ROUNDING
  argblock = push_block (GEN_INT (args_size.constant), 0, 0);
#endif

#ifdef PUSH_ARGS_REVERSED
  inc = -1;
  argnum = nargs - 1;
#else
  inc = 1;
  argnum = 0;
#endif

  /* Push the args that need to be pushed.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (! (reg != 0 && partial == 0))
        emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
                        argblock, GEN_INT (argvec[count].offset.constant));
      NO_DEFER_POP;
    }

#ifdef PUSH_ARGS_REVERSED
  argnum = nargs - 1;
#else
  argnum = 0;
#endif

  /* Now load any reg parms into their regs.  */

  for (count = 0; count < nargs; count++, argnum += inc)
    {
      register enum machine_mode mode = argvec[argnum].mode;
      register rtx val = argvec[argnum].value;
      rtx reg = argvec[argnum].reg;
      int partial = argvec[argnum].partial;

      if (reg != 0 && partial == 0)
        emit_move_insn (reg, val);
      NO_DEFER_POP;
    }

  /* For version 1.37, try deleting this entirely.  */
  if (! no_queue)
    emit_queue ();

  /* Any regs containing parms remain in use through the call.  */
  start_sequence ();
  for (count = 0; count < nargs; count++)
    if (argvec[count].reg != 0)
      emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));

  use_insns = get_insns ();
  end_sequence ();

  fun = prepare_call_address (fun, NULL_TREE, &use_insns);

  /* Don't allow popping to be deferred, since then
     cse'ing of library calls could delete a call and leave the pop.  */
  NO_DEFER_POP;

  /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
     will set inhibit_defer_pop to that value.  */

  emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
               FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
               outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
               old_inhibit_defer_pop + 1, use_insns, no_queue);

  /* Now restore inhibit_defer_pop to its actual original value.  */
  OK_DEFER_POP;
}

/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
          && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();
          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
                                            force_reg (Pmode, offset_rtx)));
        }
      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
         in an object which is not volatile, the object may be in a register,
         and then we would abort over here.  */
          else
            abort ();
#endif
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast makes HPUX compiler happy.  */
                             ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                             : VOIDmode),
                            unsignedp,
                            /* Required alignment of containing datum.  */
                            TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
                            int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      return result;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx
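/* Illustrative sketch only; the call below is an assumption and does not
   appear in this listing.  expand_assignment is the expander used for
   assignment expressions: a caller such as expand_expr, on reaching an
   assignment node (MODIFY_EXPR), would invoke it along the lines of

     rtx value = expand_assignment (lhs_tree, rhs_tree,
                                    1 /* want_value */, 0 /* suggest_reg */);

   where lhs_tree and rhs_tree stand for the assignment's operand trees
   (hypothetical names).  If want_value is nonzero, an rtx for the value
   of the destination is returned, possibly containing a QUEUED rtx, as
   the function comment above describes.  */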