⭐ 欢迎来到虫虫下载站! | 📦 资源下载 📁 资源专辑 ℹ️ 关于我们
⭐ 虫虫下载站

📄 arm.c

📁 linux下编程用 编译软件
💻 C
📖 第 1 页 / 共 5 页
字号:
    {      if (arm_arch6k && !TARGET_THUMB)	target_thread_pointer = TP_CP15;      else	target_thread_pointer = TP_SOFT;    }  if (TARGET_HARD_TP && TARGET_THUMB)    error ("can not use -mtp=cp15 with -mthumb");  /* Override the default structure alignment for AAPCS ABI.  */  if (TARGET_AAPCS_BASED)    arm_structure_size_boundary = 8;  if (structure_size_string != NULL)    {      int size = strtol (structure_size_string, NULL, 0);      if (size == 8 || size == 32	  || (ARM_DOUBLEWORD_ALIGN && size == 64))	arm_structure_size_boundary = size;      else	warning (0, "structure size boundary can only be set to %s",		 ARM_DOUBLEWORD_ALIGN ? "8, 32 or 64": "8 or 32");    }  if (arm_pic_register_string != NULL)    {      int pic_register = decode_reg_name (arm_pic_register_string);      if (!flag_pic)	warning (0, "-mpic-register= is useless without -fpic");      /* Prevent the user from choosing an obviously stupid PIC register.  */      else if (pic_register < 0 || call_used_regs[pic_register]	       || pic_register == HARD_FRAME_POINTER_REGNUM	       || pic_register == STACK_POINTER_REGNUM	       || pic_register >= PC_REGNUM)	error ("unable to use '%s' for PIC register", arm_pic_register_string);      else	arm_pic_register = pic_register;    }  if (TARGET_THUMB && flag_schedule_insns)    {      /* Don't warn since it's on by default in -O2.  */      flag_schedule_insns = 0;    }  if (optimize_size)    {      arm_constant_limit = 1;      /* If optimizing for size, bump the number of instructions that we         are prepared to conditionally execute (even on a StrongARM).  */      max_insns_skipped = 6;    }  else    {      /* For processors with load scheduling, it never costs more than         2 cycles to load a constant, and the load scheduler may well	 reduce that to 1.  
*/      if (arm_ld_sched)        arm_constant_limit = 1;      /* On XScale the longer latency of a load makes it more difficult         to achieve a good schedule, so it's faster to synthesize	 constants that can be done in two insns.  */      if (arm_tune_xscale)        arm_constant_limit = 2;      /* StrongARM has early execution of branches, so a sequence         that is worth skipping is shorter.  */      if (arm_tune_strongarm)        max_insns_skipped = 3;    }  /* Register global variables with the garbage collector.  */  arm_add_gc_roots ();}static voidarm_add_gc_roots (void){  gcc_obstack_init(&minipool_obstack);  minipool_startobj = (char *) obstack_alloc (&minipool_obstack, 0);}/* A table of known ARM exception types.   For use with the interrupt function attribute.  */typedef struct{  const char *const arg;  const unsigned long return_value;}isr_attribute_arg;static const isr_attribute_arg isr_attribute_args [] ={  { "IRQ",   ARM_FT_ISR },  { "irq",   ARM_FT_ISR },  { "FIQ",   ARM_FT_FIQ },  { "fiq",   ARM_FT_FIQ },  { "ABORT", ARM_FT_ISR },  { "abort", ARM_FT_ISR },  { "ABORT", ARM_FT_ISR },  { "abort", ARM_FT_ISR },  { "UNDEF", ARM_FT_EXCEPTION },  { "undef", ARM_FT_EXCEPTION },  { "SWI",   ARM_FT_EXCEPTION },  { "swi",   ARM_FT_EXCEPTION },  { NULL,    ARM_FT_NORMAL }};/* Returns the (interrupt) function type of the current   function, or ARM_FT_UNKNOWN if the type cannot be determined.  */static unsigned longarm_isr_value (tree argument){  const isr_attribute_arg * ptr;  const char *              arg;  /* No argument - default to IRQ.  */  if (argument == NULL_TREE)    return ARM_FT_ISR;  /* Get the value of the argument.  */  if (TREE_VALUE (argument) == NULL_TREE      || TREE_CODE (TREE_VALUE (argument)) != STRING_CST)    return ARM_FT_UNKNOWN;  arg = TREE_STRING_POINTER (TREE_VALUE (argument));  /* Check it against the list of known arguments.  
*/  for (ptr = isr_attribute_args; ptr->arg != NULL; ptr++)    if (streq (arg, ptr->arg))      return ptr->return_value;  /* An unrecognized interrupt type.  */  return ARM_FT_UNKNOWN;}/* Computes the type of the current function.  */static unsigned longarm_compute_func_type (void){  unsigned long type = ARM_FT_UNKNOWN;  tree a;  tree attr;  gcc_assert (TREE_CODE (current_function_decl) == FUNCTION_DECL);  /* Decide if the current function is volatile.  Such functions     never return, and many memory cycles can be saved by not storing     register values that will never be needed again.  This optimization     was added to speed up context switching in a kernel application.  */  if (optimize > 0      && (TREE_NOTHROW (current_function_decl)          || !(flag_unwind_tables               || (flag_exceptions && !USING_SJLJ_EXCEPTIONS)))      && TREE_THIS_VOLATILE (current_function_decl))    type |= ARM_FT_VOLATILE;  if (cfun->static_chain_decl != NULL)    type |= ARM_FT_NESTED;  attr = DECL_ATTRIBUTES (current_function_decl);  a = lookup_attribute ("naked", attr);  if (a != NULL_TREE)    type |= ARM_FT_NAKED;  a = lookup_attribute ("isr", attr);  if (a == NULL_TREE)    a = lookup_attribute ("interrupt", attr);  if (a == NULL_TREE)    type |= TARGET_INTERWORK ? ARM_FT_INTERWORKED : ARM_FT_NORMAL;  else    type |= arm_isr_value (TREE_VALUE (a));  return type;}/* Returns the type of the current function.  */unsigned longarm_current_func_type (void){  if (ARM_FUNC_TYPE (cfun->machine->func_type) == ARM_FT_UNKNOWN)    cfun->machine->func_type = arm_compute_func_type ();  return cfun->machine->func_type;}/* Return 1 if it is possible to return using a single instruction.   If SIBLING is non-null, this is a test for a return before a sibling   call.  SIBLING is the call insn, so we can examine its register usage.  
 */
int
use_return_insn (int iscond, rtx sibling)
{
  int regno;
  unsigned int func_type;
  unsigned long saved_int_regs;
  unsigned HOST_WIDE_INT stack_adjust;
  arm_stack_offsets *offsets;

  /* Never use a return instruction before reload has run.  */
  if (!reload_completed)
    return 0;

  func_type = arm_current_func_type ();

  /* Naked functions and volatile functions need special
     consideration.  */
  if (func_type & (ARM_FT_VOLATILE | ARM_FT_NAKED))
    return 0;

  /* So do interrupt functions that use the frame pointer.  */
  if (IS_INTERRUPT (func_type) && frame_pointer_needed)
    return 0;

  offsets = arm_get_frame_offsets ();
  stack_adjust = offsets->outgoing_args - offsets->saved_regs;

  /* As do variadic functions.  */
  if (current_function_pretend_args_size
      || cfun->machine->uses_anonymous_args
      /* Or if the function calls __builtin_eh_return () */
      || current_function_calls_eh_return
      /* Or if the function calls alloca */
      || current_function_calls_alloca
      /* Or if there is a stack adjustment.  However, if the stack pointer
	 is saved on the stack, we can use a pre-incrementing stack load.  */
      || !(stack_adjust == 0 || (frame_pointer_needed && stack_adjust == 4)))
    return 0;

  /* Mask of core registers that must be saved/restored on entry/exit.  */
  saved_int_regs = arm_compute_save_reg_mask ();

  /* Unfortunately, the insn

       ldmib sp, {..., sp, ...}

     triggers a bug on most SA-110 based devices, such that the stack
     pointer won't be correctly restored if the instruction takes a
     page fault.  We work around this problem by popping r3 along with
     the other registers, since that is never slower than executing
     another instruction.

     We test for !arm_arch5 here, because code for any architecture
     less than this could potentially be run on one of the buggy
     chips.  */
  if (stack_adjust == 4 && !arm_arch5)
    {
      /* Validate that r3 is a call-clobbered register (always true in
	 the default abi) ...  */
      if (!call_used_regs[3])
	return 0;

      /* ... that it isn't being used for a return value ... */
      if (arm_size_return_regs () >= (4 * UNITS_PER_WORD))
	return 0;

      /* ... or for a tail-call argument ...  */
      if (sibling)
	{
	  gcc_assert (GET_CODE (sibling) == CALL_INSN);

	  if (find_regno_fusage (sibling, USE, 3))
	    return 0;
	}

      /* ... and that there are no call-saved registers in r0-r2
	 (always true in the default ABI).  */
      if (saved_int_regs & 0x7)
	return 0;
    }

  /* Can't be done if interworking with Thumb, and any registers have been
     stacked.  */
  if (TARGET_INTERWORK && saved_int_regs != 0)
    return 0;

  /* On StrongARM, conditional returns are expensive if they aren't
     taken and multiple registers have been stacked.  */
  if (iscond && arm_tune_strongarm)
    {
      /* Conditional return when just the LR is stored is a simple
	 conditional-load instruction, that's not expensive.  */
      if (saved_int_regs != 0 && saved_int_regs != (1 << LR_REGNUM))
	return 0;

      /* The PIC register would also need reloading.  */
      if (flag_pic && regs_ever_live[PIC_OFFSET_TABLE_REGNUM])
	return 0;
    }

  /* If there are saved registers but the LR isn't saved, then we need
     two instructions for the return.  */
  if (saved_int_regs && !(saved_int_regs & (1 << LR_REGNUM)))
    return 0;

  /* Can't be done if any of the FPA regs are pushed,
     since this also requires an insn.  */
  if (TARGET_HARD_FLOAT && TARGET_FPA)
    for (regno = FIRST_FPA_REGNUM; regno <= LAST_FPA_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	return 0;

  /* Likewise VFP regs.  */
  if (TARGET_HARD_FLOAT && TARGET_VFP)
    for (regno = FIRST_VFP_REGNUM; regno <= LAST_VFP_REGNUM; regno++)
      if (regs_ever_live[regno] && !call_used_regs[regno])
	return 0;

  /* Likewise iWMMXt regs.  */
  if (TARGET_REALLY_IWMMXT)
    for (regno = FIRST_IWMMXT_REGNUM; regno <= LAST_IWMMXT_REGNUM; regno++)
      if (regs_ever_live[regno] && ! call_used_regs [regno])
	return 0;

  /* All checks passed: a single return instruction suffices.  */
  return 1;
}

/* Return TRUE if int I is a valid immediate ARM constant.
 */
int
const_ok_for_arm (HOST_WIDE_INT i)
{
  int lowbit;

  /* For machines with >32 bit HOST_WIDE_INT, the bits above bit 31 must
     be all zero, or all one.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff) != 0
      && ((i & ~(unsigned HOST_WIDE_INT) 0xffffffff)
	  != ((~(unsigned HOST_WIDE_INT) 0)
	      & ~(unsigned HOST_WIDE_INT) 0xffffffff)))
    return FALSE;

  /* From here on only the low 32 bits matter.  */
  i &= (unsigned HOST_WIDE_INT) 0xffffffff;

  /* Fast return for 0 and small values.  We must do this for zero, since
     the code below can't handle that one case.  */
  if ((i & ~(unsigned HOST_WIDE_INT) 0xff) == 0)
    return TRUE;

  /* Get the number of trailing zeros, rounded down to the nearest even
     number.  (ARM immediates are an 8-bit value rotated by an even
     amount, so only even rotations need be considered.)  */
  lowbit = (ffs ((int) i) - 1) & ~1;

  /* An 8-bit field aligned at an even bit position covers all
     non-wrapping rotations.  */
  if ((i & ~(((unsigned HOST_WIDE_INT) 0xff) << lowbit)) == 0)
    return TRUE;
  /* The remaining cases are 8-bit fields that wrap around the top of
     the 32-bit word: low bits set near bit 0 combined with high bits
     near bit 31.  */
  else if (lowbit <= 4
	   && ((i & ~0xc000003f) == 0
	       || (i & ~0xf000000f) == 0
	       || (i & ~0xfc000003) == 0))
    return TRUE;

  return FALSE;
}

/* Return true if I is a valid constant for the operation CODE.
   Where I itself is not directly encodable, check whether the
   operation can be recast using the negated or inverted constant
   (e.g. ADD x, -I for PLUS, or BIC with ~I for AND).  */
static int
const_ok_for_op (HOST_WIDE_INT i, enum rtx_code code)
{
  if (const_ok_for_arm (i))
    return 1;

  switch (code)
    {
    case PLUS:
      /* PLUS can become a SUB of the negated constant.  */
      return const_ok_for_arm (ARM_SIGN_EXTEND (-i));

    case MINUS:		/* Should only occur with (MINUS I reg) => rsb */
    case XOR:
    case IOR:
      return 0;

    case AND:
      /* AND can become a BIC of the inverted constant.  */
      return const_ok_for_arm (ARM_SIGN_EXTEND (~i));

    default:
      gcc_unreachable ();
    }
}

/* Emit a sequence of insns to handle a large constant.
   CODE is the code of the operation required, it can be any of SET, PLUS,
   IOR, AND, XOR, MINUS;
   MODE is the mode in which the operation is being performed;
   VAL is the integer to operate on;
   SOURCE is the other operand (a register, or a null-pointer for SET);
   SUBTARGETS means it is safe to create scratch registers if that will
   either produce a simpler sequence, or we will want to cse the values.
   Return value is the number of insns emitted.
*/intarm_split_constant (enum rtx_code code, enum machine_mode mode, rtx insn,		    HOST_WIDE_INT val, rtx target, rtx source, int subtargets){  rtx cond;  if (insn && GET_CODE (PATTERN (insn)) == COND_EXEC)    cond = COND_EXEC_TEST (PATTERN (insn));  else    cond = NULL_RTX;  if (subtargets || code == SET      || (GET_CODE (target) == REG && GET_CODE (source) == REG	  && REGNO (target) != REGNO (source)))    {      /* After arm_reorg has been called, we can't fix up expensive	 constants by pushing them into memory so we must synthesize

⌨️ 快捷键说明

复制代码 Ctrl + C
搜索代码 Ctrl + F
全屏模式 F11
切换主题 Ctrl + Shift + D
显示快捷键 ?
增大字号 Ctrl + =
减小字号 Ctrl + -