📄 reorg.c
字号:
	continue;

      /* (Continuation of the scan loop of the enclosing function, whose
	 head lies above this excerpt.)  */
      if (GET_CODE (pat) == SEQUENCE)
	{
	  /* If this is a CALL_INSN and its delay slots, it is hard to track
	     the resource needs properly, so give up.  */
	  if (GET_CODE (XVECEXP (pat, 0, 0)) == CALL_INSN)
	    return 0;

	  /* See if any of the insns in the delay slot match, updating
	     resource requirements as we go.  */
	  for (i = XVECLEN (pat, 0) - 1; i > 0; i--)
	    {
	      rtx candidate = XVECEXP (pat, 0, i);

	      /* If an insn will be annulled if the branch is false, it isn't
		 considered as a possible duplicate insn.  */
	      if (rtx_equal_p (PATTERN (candidate), ipat)
		  && ! (INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
			&& INSN_FROM_TARGET_P (candidate)))
		{
		  /* Show that this insn will be used in the sequel.  */
		  INSN_FROM_TARGET_P (candidate) = 0;
		  return 1;
		}

	      /* Unless this is an annulled insn from the target of a branch,
		 we must stop if it sets anything needed or set by INSN.  */
	      if ((! INSN_ANNULLED_BRANCH_P (XVECEXP (pat, 0, 0))
		   || ! INSN_FROM_TARGET_P (candidate))
		  && insn_sets_resource_p (candidate, &needed, 1))
		return 0;
	    }

	  /* If the insn requiring the delay slot conflicts with INSN, we
	     must stop.  */
	  if (insn_sets_resource_p (XVECEXP (pat, 0, 0), &needed, 1))
	    return 0;
	}
      else
	{
	  /* See if TRIAL is the same as INSN.  */
	  pat = PATTERN (trial);
	  if (rtx_equal_p (pat, ipat))
	    return 1;

	  /* Can't go any further if TRIAL conflicts with INSN.  */
	  if (insn_sets_resource_p (trial, &needed, 1))
	    return 0;
	}
    }

  /* No duplicate found: the insn is not redundant.  */
  return 0;
}

/* Return 1 if THREAD can only be executed in one way.  If LABEL is non-zero,
   it is the target of the branch insn being scanned.  If ALLOW_FALLTHROUGH
   is non-zero, we are allowed to fall into this thread; otherwise, we are
   not.

   If LABEL is used more than once or we pass a label other than LABEL
   before finding an active insn, we do not own this thread.  */

static int
own_thread_p (thread, label, allow_fallthrough)
     rtx thread;
     rtx label;
     int allow_fallthrough;
{
  rtx active_insn;
  rtx insn;

  /* We don't own the function end. 
*/
  if (thread == 0)
    return 0;

  /* Get the first active insn, or THREAD, if it is an active insn.  */
  active_insn = next_active_insn (PREV_INSN (thread));

  /* Any label before the first active insn (other than a singly-used LABEL)
     means control can enter the thread some other way, so we don't own it.  */
  for (insn = thread; insn != active_insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == CODE_LABEL
	&& (insn != label || LABEL_NUSES (insn) != 1))
      return 0;

  if (allow_fallthrough)
    return 1;

  /* Ensure that we reach a BARRIER before any insn or label.  */
  for (insn = prev_nonnote_insn (thread);
       insn == 0 || GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    if (insn == 0
	|| GET_CODE (insn) == CODE_LABEL
	|| (GET_CODE (insn) == INSN
	    && GET_CODE (PATTERN (insn)) != USE
	    && GET_CODE (PATTERN (insn)) != CLOBBER))
      return 0;

  return 1;
}

/* Find the number of the basic block that starts closest to INSN.  Return -1
   if we couldn't find such a basic block.  */

static int
find_basic_block (insn)
     rtx insn;
{
  int i;

  /* Scan backwards to the previous BARRIER.  Then see if we can find a
     label that starts a basic block.  Return the basic block number.  */
  for (insn = prev_nonnote_insn (insn);
       insn && GET_CODE (insn) != BARRIER;
       insn = prev_nonnote_insn (insn))
    ;

  /* The start of the function is basic block zero.  */
  if (insn == 0)
    return 0;

  /* See if any of the upcoming CODE_LABELs start a basic block.  If we reach
     anything other than a CODE_LABEL or note, we can't find this code.  */
  for (insn = next_nonnote_insn (insn);
       insn && GET_CODE (insn) == CODE_LABEL;
       insn = next_nonnote_insn (insn))
    {
      for (i = 0; i < n_basic_blocks; i++)
	if (insn == basic_block_head[i])
	  return i;
    }

  return -1;
}

/* Called when INSN is being moved from a location near the target of a jump.
   We leave a marker of the form (use (INSN)) immediately in front of WHERE
   for mark_target_live_regs.  These markers will be deleted when reorg
   finishes.

   We used to try to update the live status of registers if WHERE is at the
   start of a basic block, but that can't work since we may remove a
   BARRIER in relax_delay_slots. 
*/

static void
update_block (insn, where)
     rtx insn;
     rtx where;
{
  int b;

  /* Ignore if this was in a delay slot and it came from the target of
     a branch.  */
  if (INSN_FROM_TARGET_P (insn))
    return;

  /* Leave a (use (INSN)) marker in front of WHERE so that
     mark_target_live_regs can still see what INSN referenced.  */
  emit_insn_before (gen_rtx (USE, VOIDmode, insn), where);

  /* INSN might be making a value live in a block where it didn't use to
     be.  So recompute liveness information for this block.  */
  b = find_basic_block (insn);
  if (b != -1)
    bb_ticks[b]++;
}

/* Marks registers possibly live at the current place being scanned by
   mark_target_live_regs.  Used only by the next two functions.  */

static HARD_REG_SET current_live_regs;

/* Marks registers for which we have seen a REG_DEAD note but no assignment.
   Also only used by the next two functions.  */

static HARD_REG_SET pending_dead_regs;

/* Utility function called from mark_target_live_regs via note_stores.
   It deadens any CLOBBERed registers and livens any SET registers.  */

static void
update_live_status (dest, x)
     rtx dest;
     rtx x;
{
  int first_regno, last_regno;
  int i;

  /* Only a REG, or a SUBREG of a REG, names hard registers we track.  */
  if (GET_CODE (dest) != REG
      && (GET_CODE (dest) != SUBREG
	  || GET_CODE (SUBREG_REG (dest)) != REG))
    return;

  if (GET_CODE (dest) == SUBREG)
    first_regno = REGNO (SUBREG_REG (dest)) + SUBREG_WORD (dest);
  else
    first_regno = REGNO (dest);

  last_regno = first_regno + HARD_REGNO_NREGS (first_regno, GET_MODE (dest));

  if (GET_CODE (x) == CLOBBER)
    for (i = first_regno; i < last_regno; i++)
      CLEAR_HARD_REG_BIT (current_live_regs, i);
  else
    for (i = first_regno; i < last_regno; i++)
      {
	SET_HARD_REG_BIT (current_live_regs, i);
	CLEAR_HARD_REG_BIT (pending_dead_regs, i);
      }
}

/* Similar to next_insn, but ignores insns in the delay slots of an
   annulled branch.  */

static rtx
next_insn_no_annul (insn)
     rtx insn;
{
  if (insn)
    {
      /* If INSN is an annulled branch, skip any insns from the target
	 of the branch. 
*/
      if (INSN_ANNULLED_BRANCH_P (insn)
	  && NEXT_INSN (PREV_INSN (insn)) != insn)
	while (INSN_FROM_TARGET_P (NEXT_INSN (insn)))
	  insn = NEXT_INSN (insn);

      insn = NEXT_INSN (insn);

      /* If we stepped onto a SEQUENCE, descend to its first insn.  */
      if (insn && GET_CODE (insn) == INSN
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Set the resources that are live at TARGET.

   If TARGET is zero, we refer to the end of the current function and can
   return our precomputed value.  Otherwise, we try to find out what is
   live by consulting the basic block information.  This is tricky, because
   we must consider the actions of reload and jump optimization, which
   occur after the basic block information has been computed.

   Accordingly, we proceed as follows:

   We find the previous BARRIER and look at all immediately following labels
   (with no intervening active insns) to see if any of them start a basic
   block.  If we hit the start of the function first, we use block 0.

   Once we have found a basic block and a corresponding first insn, we can
   accurately compute the live status from basic_block_live_regs and
   reg_renumber.  (By starting at a label following a BARRIER, we are immune
   to actions taken by reload and jump.)  Then we scan all insns between
   that point and our target.  For each CLOBBER (or for call-clobbered regs
   when we pass a CALL_INSN), mark the appropriate registers as dead.  For
   a SET, mark them as live.

   We have to be careful when using REG_DEAD notes because they are not
   updated by such things as find_equiv_reg.  So keep track of registers
   marked as dead that haven't been assigned to, and mark them dead at the
   next CODE_LABEL since reload and jump won't propagate values across
   labels.

   If we cannot find the start of a basic block (should be a very rare
   case, if it can happen at all), mark everything as potentially live.

   Next, scan forward from TARGET looking for things set or clobbered
   before they are used.  These are not live. 
Because we can be called many times on the same target, save our results
   in a hash table indexed by INSN_UID.  */

static void
mark_target_live_regs (target, res)
     rtx target;
     struct resources *res;
{
  int b = -1;
  int i;
  struct target_info *tinfo;
  rtx insn, next;
  rtx jump_insn = 0;
  rtx jump_target;
  HARD_REG_SET scratch;
  struct resources set, needed;
  int jump_count = 0;

  /* Handle end of function.  */
  if (target == 0)
    {
      *res = end_of_function_needs;
      return;
    }

  /* We have to assume memory is needed, but the CC isn't.  */
  res->memory = 1;
  res->volatil = 0;
  res->cc = 0;

  /* See if we have computed this value already.  */
  for (tinfo = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
       tinfo; tinfo = tinfo->next)
    if (tinfo->uid == INSN_UID (target))
      break;

  /* Start by getting the basic block number.  If we have saved information,
     we can get it from there unless the insn at the start of the basic block
     has been deleted.  */
  if (tinfo && tinfo->block != -1
      && ! INSN_DELETED_P (basic_block_head[tinfo->block]))
    b = tinfo->block;

  if (b == -1)
    b = find_basic_block (target);

  if (tinfo)
    {
      /* If the information is up-to-date, use it.  Otherwise, we will
	 update it below.  */
      if (b == tinfo->block && b != -1 && tinfo->bb_tick == bb_ticks[b])
	{
	  COPY_HARD_REG_SET (res->regs, tinfo->live_regs);
	  return;
	}
    }
  else
    {
      /* Allocate a place to put our results and chain it into the
	 hash table.  */
      tinfo = (struct target_info *) oballoc (sizeof (struct target_info));
      tinfo->uid = INSN_UID (target);
      tinfo->block = b;
      tinfo->next = target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME];
      target_hash_table[INSN_UID (target) % TARGET_HASH_PRIME] = tinfo;
    }

  CLEAR_HARD_REG_SET (pending_dead_regs);

  /* If we found a basic block, get the live registers from it and update
     them with anything set or killed between its start and the insn before
     TARGET.  Otherwise, we must assume everything is live. 
*/
  if (b != -1)
    {
      regset regs_live = basic_block_live_at_start[b];
      int offset, j;
      REGSET_ELT_TYPE bit;
      int regno;
      rtx start_insn, stop_insn;

      /* Compute hard regs live at start of block -- this is the real hard
	 regs marked live, plus live pseudo regs that have been renumbered to
	 hard regs.  */

#ifdef HARD_REG_SET
      current_live_regs = *regs_live;
#else
      COPY_HARD_REG_SET (current_live_regs, regs_live);
#endif

      /* Walk the regset word by word; I tracks the pseudo-register number
	 corresponding to the current bit.  */
      for (offset = 0, i = 0; offset < regset_size; offset++)
	{
	  if (regs_live[offset] == 0)
	    i += REGSET_ELT_BITS;
	  else
	    for (bit = 1; bit && i < max_regno; bit <<= 1, i++)
	      if ((regs_live[offset] & bit)
		  && (regno = reg_renumber[i]) >= 0)
		for (j = regno;
		     j < regno + HARD_REGNO_NREGS (regno,
						   PSEUDO_REGNO_MODE (i));
		     j++)
		  SET_HARD_REG_BIT (current_live_regs, j);
	}

      /* Get starting and ending insn, handling the case where each might
	 be a SEQUENCE.  */
      start_insn = (b == 0 ? get_insns () : basic_block_head[b]);
      stop_insn = target;

      if (GET_CODE (start_insn) == INSN
	  && GET_CODE (PATTERN (start_insn)) == SEQUENCE)
	start_insn = XVECEXP (PATTERN (start_insn), 0, 0);

      if (GET_CODE (stop_insn) == INSN
	  && GET_CODE (PATTERN (stop_insn)) == SEQUENCE)
	stop_insn = next_insn (PREV_INSN (stop_insn));

      for (insn = start_insn; insn != stop_insn;
	   insn = next_insn_no_annul (insn))
	{
	  rtx link;
	  rtx real_insn = insn;

	  /* If this insn is from the target of a branch, it isn't going to
	     be used in the sequel.  If it is used in both cases, this
	     test will not be true.  */
	  if (INSN_FROM_TARGET_P (insn))
	    continue;

	  /* If this insn is a USE made by update_block, we care about the
	     underlying insn.  */
	  if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) == USE
	      && GET_RTX_CLASS (GET_CODE (XEXP (PATTERN (insn), 0))) == 'i')
	    real_insn = XEXP (PATTERN (insn), 0);

	  if (GET_CODE (real_insn) == CALL_INSN)
	    {
	      /* CALL clobbers all call-used regs that aren't
⌨️ 快捷键说明
复制代码
Ctrl + C
搜索代码
Ctrl + F
全屏模式
F11
切换主题
Ctrl + Shift + D
显示快捷键
?
增大字号
Ctrl + =
减小字号
Ctrl + -