
📄 reorg.c

📁 early FreeBSD implementation
💻 C
📖 Page 1 of 5
}

/* Delete INSN from the delay slot of the insn that it is in.  This may
   produce an insn without anything in its delay slots.  */

static void
delete_from_delay_slot (insn)
     rtx insn;
{
  rtx trial, seq_insn, seq, prev;
  rtx delay_list = 0;
  int i;

  /* We first must find the insn containing the SEQUENCE with INSN in its
     delay slot.  Do this by finding an insn, TRIAL, where
     PREV_INSN (NEXT_INSN (TRIAL)) != TRIAL.  */

  for (trial = insn;
       PREV_INSN (NEXT_INSN (trial)) == trial;
       trial = NEXT_INSN (trial))
    ;

  seq_insn = PREV_INSN (NEXT_INSN (trial));
  seq = PATTERN (seq_insn);

  /* Create a delay list consisting of all the insns other than the one
     we are deleting (unless we were the only one).  */
  if (XVECLEN (seq, 0) > 2)
    for (i = 1; i < XVECLEN (seq, 0); i++)
      if (XVECEXP (seq, 0, i) != insn)
        delay_list = add_to_delay_list (XVECEXP (seq, 0, i), delay_list);

  /* Delete the old SEQUENCE, re-emit the insn that used to have the delay
     list, and rebuild the delay list if non-empty.  */
  prev = PREV_INSN (seq_insn);
  trial = XVECEXP (seq, 0, 0);
  delete_insn (seq_insn);
  add_insn_after (trial, prev);

  if (GET_CODE (trial) == JUMP_INSN
      && (simplejump_p (trial) || GET_CODE (PATTERN (trial)) == RETURN))
    emit_barrier_after (trial);

  /* If there are any delay insns, re-emit them.  Otherwise clear the
     annul flag.  */
  if (delay_list)
    trial = emit_delay_sequence (trial, delay_list, XVECLEN (seq, 0) - 2, 0);
  else
    INSN_ANNULLED_BRANCH_P (trial) = 0;

  INSN_FROM_TARGET_P (insn) = 0;

  /* Show we need to fill this insn again.  */
  obstack_ptr_grow (&unfilled_slots_obstack, trial);
}

/* Delete INSN, a JUMP_INSN.  If it is a conditional jump, we must track down
   the insn that sets CC0 for it and delete it too.  */

static void
delete_scheduled_jump (insn)
     rtx insn;
{
  /* Delete the insn that sets cc0 for us.  On machines without cc0, we could
     delete the insn that sets the condition code, but it is hard to find it.
     Since this case is rare anyway, don't bother trying; there would likely
     be other insns that became dead anyway, which we wouldn't know to
     delete.  */

#ifdef HAVE_cc0
  if (reg_mentioned_p (cc0_rtx, insn))
    {
      rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

      /* If a reg-note was found, it points to an insn to set CC0.  This
         insn is in the delay list of some other insn.  So delete it from
         the delay list it was in.  */
      if (note)
        {
          if (! FIND_REG_INC_NOTE (XEXP (note, 0), NULL_RTX)
              && sets_cc0_p (PATTERN (XEXP (note, 0))) == 1)
            delete_from_delay_slot (XEXP (note, 0));
        }
      else
        {
          /* The insn setting CC0 is our previous insn, but it may be in
             a delay slot.  It will be the last insn in the delay slot, if
             it is.  */
          rtx trial = previous_insn (insn);
          if (GET_CODE (trial) == NOTE)
            trial = prev_nonnote_insn (trial);
          if (sets_cc0_p (PATTERN (trial)) != 1
              || FIND_REG_INC_NOTE (trial, 0))
            return;
          if (PREV_INSN (NEXT_INSN (trial)) == trial)
            delete_insn (trial);
          else
            delete_from_delay_slot (trial);
        }
    }
#endif

  delete_insn (insn);
}
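/* Editor's note (illustration, not part of the original source): the two
   routines above manipulate the SEQUENCE rtx that reorg uses to represent a
   filled delay slot.  Element 0 of the vector is the insn that owns the
   slots; elements 1..n are the insns occupying them.  A branch with one
   filled slot looks roughly like:

        (sequence [
           (jump_insn ...)      ; the insn that needs the delay slots
           (insn ...)           ; the insn filling its one delay slot
        ])

   delete_from_delay_slot rebuilds this structure minus one element; the
   XVECEXP (seq, 0, i) and XVECLEN (seq, 0) accesses above index exactly
   this vector.  */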
/* Counters for delay-slot filling.  */

#define NUM_REORG_FUNCTIONS 2
#define MAX_DELAY_HISTOGRAM 3
#define MAX_REORG_PASSES 2

static int num_insns_needing_delays[NUM_REORG_FUNCTIONS][MAX_REORG_PASSES];
static int num_filled_delays[NUM_REORG_FUNCTIONS][MAX_DELAY_HISTOGRAM+1][MAX_REORG_PASSES];
static int reorg_pass_number;

static void
note_delay_statistics (slots_filled, index)
     int slots_filled, index;
{
  num_insns_needing_delays[index][reorg_pass_number]++;
  if (slots_filled > MAX_DELAY_HISTOGRAM)
    slots_filled = MAX_DELAY_HISTOGRAM;
  num_filled_delays[index][slots_filled][reorg_pass_number]++;
}

#if defined(ANNUL_IFFALSE_SLOTS) || defined(ANNUL_IFTRUE_SLOTS)

/* Optimize the following cases:

   1.  When a conditional branch skips over only one instruction,
       use an annulling branch and put that insn in the delay slot.
       Use either a branch that annuls when the condition is true or
       invert the test with a branch that annuls when the condition is
       false.  This saves insns, since otherwise we must copy an insn
       from the L1 target.

        (orig)           (skip)         (otherwise)
        Bcc.n L1         Bcc',a L1      Bcc,a L1'
        insn             insn           insn2
      L1:              L1:            L1:
        insn2            insn2          insn2
        insn3            insn3        L1':
                                        insn3

   2.  When a conditional branch skips over only one instruction,
       and after that, it unconditionally branches somewhere else,
       perform the similar optimization.  This saves executing the
       second branch in the case where the inverted condition is true.

        Bcc.n L1         Bcc',a L2
        insn             insn
      L1:              L1:
        Bra L2           Bra L2

   INSN is a JUMP_INSN.

   This should be expanded to skip over N insns, where N is the number
   of delay slots required.  */

static rtx
optimize_skip (insn)
     register rtx insn;
{
  register rtx trial = next_nonnote_insn (insn);
  rtx next_trial = next_active_insn (trial);
  rtx delay_list = 0;
  rtx target_label;

  if (trial == 0
      || GET_CODE (trial) != INSN
      || GET_CODE (PATTERN (trial)) == SEQUENCE
      || recog_memoized (trial) < 0
      || (! eligible_for_annul_false (insn, 0, trial)
          && ! eligible_for_annul_true (insn, 0, trial)))
    return 0;

  /* There are two cases where we are just executing one insn (we assume
     here that a branch requires only one insn; this should be generalized
     at some point):  Where the branch goes around a single insn or where
     we have one insn followed by a branch to the same label we branch to.
     In both of these cases, inverting the jump and annulling the delay
     slot give the same effect in fewer insns.  */

  if ((next_trial == next_active_insn (JUMP_LABEL (insn)))
      || (next_trial != 0
          && GET_CODE (next_trial) == JUMP_INSN
          && JUMP_LABEL (insn) == JUMP_LABEL (next_trial)
          && (simplejump_p (next_trial)
              || GET_CODE (PATTERN (next_trial)) == RETURN)))
    {
      if (eligible_for_annul_false (insn, 0, trial))
        {
          if (invert_jump (insn, JUMP_LABEL (insn)))
            INSN_FROM_TARGET_P (trial) = 1;
          else if (! eligible_for_annul_true (insn, 0, trial))
            return 0;
        }

      delay_list = add_to_delay_list (trial, NULL_RTX);
      next_trial = next_active_insn (trial);
      update_block (trial, trial);
      delete_insn (trial);

      /* Also, if we are targeting an unconditional
         branch, thread our jump to the target of that branch.  Don't
         change this into a RETURN here, because it may not accept what
         we have in the delay slot.  We'll fix this up later.  */

      if (next_trial && GET_CODE (next_trial) == JUMP_INSN
          && (simplejump_p (next_trial)
              || GET_CODE (PATTERN (next_trial)) == RETURN))
        {
          target_label = JUMP_LABEL (next_trial);
          if (target_label == 0)
            target_label = find_end_label ();
          redirect_jump (insn, target_label);
        }

      INSN_ANNULLED_BRANCH_P (insn) = 1;
    }

  return delay_list;
}
#endif
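/* Editor's note (worked example, not part of the original source; SPARC-style
   ",a" annulling syntax assumed).  Case 1 above turns

        be      L1              ! skip the add when the test is true
        nop                     ! unfilled delay slot
        add     %o0, 1, %o0
   L1:  ...

   into

        bne,a   L1
        add     %o0, 1, %o0     ! annulled (skipped) when bne falls through
   L1:  ...

   The test is inverted by invert_jump, the add moves into the annulled slot
   via add_to_delay_list, and INSN_ANNULLED_BRANCH_P marks the branch.  Both
   versions execute the add exactly when the original test is false, but the
   second needs one fewer instruction.  */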
/* Return truth value of the statement that this branch
   is mostly taken.  If we think that the branch is extremely likely
   to be taken, we return 2.  If the branch is slightly more likely to be
   taken, return 1.  Otherwise, return 0.

   CONDITION, if non-zero, is the condition that JUMP_INSN is testing.  */

static int
mostly_true_jump (jump_insn, condition)
     rtx jump_insn, condition;
{
  rtx target_label = JUMP_LABEL (jump_insn);
  rtx insn;

  /* If this is a conditional return insn, assume it won't return.  */
  if (target_label == 0)
    return 0;

  /* If TARGET_LABEL has no jumps between it and the end of the function,
     this is essentially a conditional return, so predict it as false.  */
  for (insn = NEXT_INSN (target_label);
       insn && GET_CODE (insn) != JUMP_INSN;
       insn = NEXT_INSN (insn))
    ;

  if (insn == 0)
    return 0;

  /* If this is the test of a loop, it is very likely true.  We scan backwards
     from the target label.  If we find a NOTE_INSN_LOOP_BEG before the next
     real insn, we assume the branch is to the top of the loop.  */
  for (insn = PREV_INSN (target_label);
       insn && GET_CODE (insn) == NOTE;
       insn = PREV_INSN (insn))
    if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
      return 2;

  /* If we couldn't figure out what this jump was, assume it won't be
     taken.  This should be rare.  */
  if (condition == 0)
    return 0;

  /* EQ tests are usually false and NE tests are usually true.  Also,
     most quantities are positive, so we can make the appropriate guesses
     about signed comparisons against zero.  */
  switch (GET_CODE (condition))
    {
    case CONST_INT:
      /* Unconditional branch.  */
      return 1;
    case EQ:
      return 0;
    case NE:
      return 1;
    case LE:
    case LT:
      if (XEXP (condition, 1) == const0_rtx)
        return 0;
      break;
    case GE:
    case GT:
      if (XEXP (condition, 1) == const0_rtx)
        return 1;
      break;
    }

  /* Predict that backward branches are usually taken and forward branches
     usually not.  If we don't know whether this is forward or backward,
     assume the branch will be taken, since most are.  */
  return (INSN_UID (jump_insn) > max_uid || INSN_UID (target_label) > max_uid
          || (uid_to_ruid[INSN_UID (jump_insn)]
              > uid_to_ruid[INSN_UID (target_label)]));
}

/* Return the condition under which INSN will branch to TARGET.  If TARGET
   is zero, return the condition under which INSN will return.  If INSN is
   an unconditional branch, return const_true_rtx.  If INSN isn't a simple
   type of jump, or it doesn't go to TARGET, return 0.  */

static rtx
get_branch_condition (insn, target)
     rtx insn;
     rtx target;
{
  rtx pat = PATTERN (insn);
  rtx src;

  if (GET_CODE (pat) == RETURN)
    return target == 0 ? const_true_rtx : 0;

  else if (GET_CODE (pat) != SET || SET_DEST (pat) != pc_rtx)
    return 0;

  src = SET_SRC (pat);
  if (GET_CODE (src) == LABEL_REF && XEXP (src, 0) == target)
    return const_true_rtx;

  else if (GET_CODE (src) == IF_THEN_ELSE
           && ((target == 0 && GET_CODE (XEXP (src, 1)) == RETURN)
               || (GET_CODE (XEXP (src, 1)) == LABEL_REF
                   && XEXP (XEXP (src, 1), 0) == target))
           && XEXP (src, 2) == pc_rtx)
    return XEXP (src, 0);

  else if (GET_CODE (src) == IF_THEN_ELSE
           && ((target == 0 && GET_CODE (XEXP (src, 2)) == RETURN)
               || (GET_CODE (XEXP (src, 2)) == LABEL_REF
                   && XEXP (XEXP (src, 2), 0) == target))
           && XEXP (src, 1) == pc_rtx)
    return gen_rtx (reverse_condition (GET_CODE (XEXP (src, 0))),
                    GET_MODE (XEXP (src, 0)),
                    XEXP (XEXP (src, 0), 0), XEXP (XEXP (src, 0), 1));

  return 0;
}
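/* Editor's note (illustration, not part of the original source): a
   conditional jump such as "if (x == 0) goto L" is represented as

        (set (pc) (if_then_else (eq (reg X) (const_int 0))
                                (label_ref L)
                                (pc)))

   for which get_branch_condition (insn, L) returns the (eq ...) rtx.  When
   the arms are swapped (the label sits in the else position), the function
   instead builds the reversed comparison (ne ...) with gen_rtx and
   reverse_condition, so callers always see the condition under which the
   branch to TARGET is taken.  */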
/* Return non-zero if CONDITION is more strict than the condition of
   INSN, i.e., if INSN will always branch if CONDITION is true.  */

static int
condition_dominates_p (condition, insn)
     rtx condition;
     rtx insn;
{
  rtx other_condition = get_branch_condition (insn, JUMP_LABEL (insn));
  enum rtx_code code = GET_CODE (condition);
  enum rtx_code other_code;

  if (rtx_equal_p (condition, other_condition)
      || other_condition == const_true_rtx)
    return 1;

  else if (condition == const_true_rtx || other_condition == 0)
    return 0;

  other_code = GET_CODE (other_condition);
  if (GET_RTX_LENGTH (code) != 2 || GET_RTX_LENGTH (other_code) != 2
      || ! rtx_equal_p (XEXP (condition, 0), XEXP (other_condition, 0))
      || ! rtx_equal_p (XEXP (condition, 1), XEXP (other_condition, 1)))
    return 0;

  return comparison_dominates_p (code, other_code);
}

/* INSN branches to an insn whose pattern SEQ is a SEQUENCE.  Given that
   the condition tested by INSN is CONDITION and the resources shown in
   OTHER_NEEDED are needed after INSN, see whether INSN can take all the insns
   from SEQ's delay list, in addition to whatever insns it may execute
   (in DELAY_LIST).

   SETS and NEEDED denote resources already set and
   needed while searching for delay slot insns.  Return the concatenated
   delay list if possible, otherwise, return 0.

   SLOTS_TO_FILL is the total number of slots required by INSN, and
   PSLOTS_FILLED points to the number filled so far (also the number of
   insns in DELAY_LIST).  It is updated with the number that have been
   filled from the SEQUENCE, if any.

   PANNUL_P points to a non-zero value if we already know that we need
   to annul INSN.  If this routine determines that annulling is needed,
   it may set that value non-zero.

   PNEW_THREAD points to a location that is to receive the place at which
   execution should continue.  */

static rtx
steal_delay_list_from_target (insn, condition, seq, delay_list,
                              sets, needed, other_needed,
                              slots_to_fill, pslots_filled, pannul_p,
                              pnew_thread)
     rtx insn, condition;
     rtx seq;
     rtx delay_list;
     struct resources *sets, *needed, *other_needed;
     int slots_to_fill;
     int *pslots_filled;
     int *pannul_p;
     rtx *pnew_thread;
{
  rtx temp;
  int slots_remaining = slots_to_fill - *pslots_filled;
  int total_slots_filled = *pslots_filled;
  rtx new_delay_list = 0;
  int must_annul = *pannul_p;
  int i;

  /* We can't do anything if there are more delay slots in SEQ than we
     can handle, or if we don't know that it will be a taken branch.
