📄 expr.c

📁 Source code from the GCC library; very helpful for programming.
💻 C
📖 Page 1 of 5
/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#include "bytecode.h"
#include "bc-opcode.h"
#include "bc-typecd.h"
#include "bc-optab.h"
#include "bc-emit.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */

int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */

int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */

int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */

tree cleanups_this_call;

/* When temporaries are created by TARGET_EXPRs, they are created at
   this level of temp_slot_level, so that they can remain allocated
   until no longer needed.  CLEANUP_POINT_EXPRs define the lifetime
   of TARGET_EXPRs.  */

int target_temp_slot_level;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */

static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */

static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* Used to generate bytecodes: keep track of size of local variables,
   as well as depth of arithmetic stack. (Notice that variables are
   stored on the machine's stack, not the arithmetic stack.) */

extern int local_vars_size;
extern int stack_depth;
extern int max_stack_depth;
extern struct obstack permanent_obstack;

static rtx enqueue_insn		PROTO((rtx, rtx));
static int queued_subexp_p	PROTO((rtx));
static void init_queue		PROTO((void));
static void move_by_pieces	PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (), enum machine_mode,
				       struct move_by_pieces *));
static void store_constructor	PROTO((tree, rtx));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int, int));
static int get_inner_unaligned_p PROTO((tree));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree));
static int fixed_type_p		PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant	PROTO((tree, tree *));
static tree c_strlen		PROTO((tree));
static rtx expand_builtin	PROTO((tree, rtx, rtx,
				       enum machine_mode, int));
static int apply_args_size	PROTO((void));
static int apply_result_size	PROTO((void));
static rtx result_vector	PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply	PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment	PROTO((tree, int));
rtx bc_expand_increment		PROTO((struct increment_operator *, tree));
tree bc_runtime_type_code	PROTO((tree));
rtx bc_allocate_local		PROTO((int, int));
void bc_store_memory		PROTO((tree, tree));
tree bc_expand_component_address PROTO((tree));
tree bc_expand_address		PROTO((tree));
void bc_expand_constructor	PROTO((tree));
void bc_adjust_stack		PROTO((int));
tree bc_canonicalize_array_ref	PROTO((tree));
void bc_load_memory		PROTO((tree, tree));
void bc_load_externaddr		PROTO((rtx));
void bc_load_externaddr_id	PROTO((tree, int));
void bc_load_localaddr		PROTO((rtx));
void bc_load_parmaddr		PROTO((rtx));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare	PROTO((rtx, rtx, rtx));
static rtx compare		PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));
static tree defer_cleanups_to	PROTO((tree));
extern void (*interim_eh_hook)	PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif

#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif

/* Maps used to convert modes to const, load, and store bytecodes. */
enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];

/* Initialize maps used to convert modes to const, load, and store
   bytecodes. */

void
bc_init_mode_to_opcode_maps ()
{
  int mode;

  for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
    mode_to_const_map[mode] =
      mode_to_load_map[mode] =
	mode_to_store_map[mode] = neverneverland;

#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
  mode_to_const_map[(int) SYM] = CONST; \
  mode_to_load_map[(int) SYM] = LOAD; \
  mode_to_store_map[(int) SYM] = STORE;

#include "modemap.def"
#undef DEF_MODEMAP
}

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));

	  MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
