📄 flow.c
  block_live_static = block_live;
  bzero (block_live, n_basic_blocks);
  bzero (block_marked, n_basic_blocks);

  /* Initialize with just block 0 reachable and no blocks marked.  */
  if (n_basic_blocks > 0)
    block_live[0] = 1;

  /* Initialize the ref chain of each label to 0.  */
  /* Record where all the blocks start and end and their depth in loops.  */
  /* For each insn, record the block it is in.  */
  /* Also mark as reachable any blocks headed by labels that
     must not be deleted.  */

  {
    register RTX_CODE prev_code = JUMP_INSN;
    register RTX_CODE code;
    int depth = 1;

    for (insn = f, i = -1; insn; insn = NEXT_INSN (insn))
      {
        code = GET_CODE (insn);
        if (code == NOTE)
          {
            if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG)
              depth++;
            else if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
              depth--;
          }
        /* A basic block starts at label, or after something that can jump.  */
        else if (code == CODE_LABEL
                 || (GET_RTX_CLASS (code) == 'i'
                     && (prev_code == JUMP_INSN
                         || (prev_code == CALL_INSN
                             && nonlocal_label_list != 0)
                         || prev_code == BARRIER)))
          {
            basic_block_head[++i] = insn;
            basic_block_end[i] = insn;
            basic_block_loop_depth[i] = depth;

            if (code == CODE_LABEL)
              {
                LABEL_REFS (insn) = insn;
                /* Any label that cannot be deleted
                   is considered to start a reachable block.  */
                if (LABEL_PRESERVE_P (insn))
                  block_live[i] = 1;
              }
          }
        else if (GET_RTX_CLASS (code) == 'i')
          {
            basic_block_end[i] = insn;
            basic_block_loop_depth[i] = depth;
          }

        /* Make a list of all labels referred to other than by jumps.  */
        if (code == INSN || code == CALL_INSN)
          {
            rtx note = find_reg_note (insn, REG_LABEL, NULL_RTX);
            if (note != 0)
              label_value_list = gen_rtx (EXPR_LIST, VOIDmode, XEXP (note, 0),
                                          label_value_list);
          }

        BLOCK_NUM (insn) = i;

        /* Don't separate a CALL_INSN from following CLOBBER insns.  This is
           a kludge that will go away when each CALL_INSN records its
           USE and CLOBBERs.  */

        if (code != NOTE
            && ! (prev_code == CALL_INSN && code == INSN
                  && GET_CODE (PATTERN (insn)) == CLOBBER))
          prev_code = code;
      }

    if (i + 1 != n_basic_blocks)
      abort ();
  }

  /* Don't delete the labels (in this function)
     that are referenced by non-jump instructions.  */

  {
    register rtx x;
    for (x = label_value_list; x; x = XEXP (x, 1))
      if (! LABEL_REF_NONLOCAL_P (x))
        block_live[BLOCK_NUM (XEXP (x, 0))] = 1;
  }

  /* Record which basic blocks control can drop in to.  */

  {
    register int i;
    for (i = 0; i < n_basic_blocks; i++)
      {
        register rtx insn = PREV_INSN (basic_block_head[i]);
        /* TEMP1 is used to avoid a bug in Sequent's compiler.  */
        register int temp1;

        while (insn && GET_CODE (insn) == NOTE)
          insn = PREV_INSN (insn);

        temp1 = insn && GET_CODE (insn) != BARRIER;
        basic_block_drops_in[i] = temp1;
      }
  }

  /* Now find which basic blocks can actually be reached
     and put all jump insns' LABEL_REFS onto the ref-chains
     of their target labels.  */

  if (n_basic_blocks > 0)
    {
      int something_marked = 1;

      /* Find all indirect jump insns and mark them as possibly jumping to
         all the labels whose addresses are explicitly used.  This is
         because, when there are computed gotos, we can't tell which labels
         they jump to, of all the possibilities.  */

      for (insn = f; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN
            && GET_CODE (PATTERN (insn)) == SET
            && SET_DEST (PATTERN (insn)) == pc_rtx
            && (GET_CODE (SET_SRC (PATTERN (insn))) == REG
                || GET_CODE (SET_SRC (PATTERN (insn))) == MEM))
          {
            rtx x;
            for (x = label_value_list; x; x = XEXP (x, 1))
              mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
                              insn, 0);

            for (x = forced_labels; x; x = XEXP (x, 1))
              mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
                              insn, 0);
          }

      /* Find all call insns and mark them as possibly jumping
         to all the nonlocal goto handler labels.  */

      for (insn = f; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == CALL_INSN)
          {
            rtx x;
            for (x = nonlocal_label_list; x; x = XEXP (x, 1))
              mark_label_ref (gen_rtx (LABEL_REF, VOIDmode, XEXP (x, 0)),
                              insn, 0);

            /* ??? This could be made smarter: in some cases it's possible
               to tell that certain calls will not do a nonlocal goto.

               For example, if the nested functions that do the nonlocal
               gotos do not have their addresses taken, then only calls to
               those functions or to other nested functions that use them
               could possibly do nonlocal gotos.  */
          }

      /* Pass over all blocks, marking each block that is reachable
         and has not yet been marked.
         Keep doing this until, in one pass, no blocks have been marked.
         Then blocks_live and blocks_marked are identical and correct.
         In addition, all jumps actually reachable have been marked.  */

      while (something_marked)
        {
          something_marked = 0;
          for (i = 0; i < n_basic_blocks; i++)
            if (block_live[i] && !block_marked[i])
              {
                block_marked[i] = 1;
                something_marked = 1;
                if (i + 1 < n_basic_blocks && basic_block_drops_in[i + 1])
                  block_live[i + 1] = 1;
                insn = basic_block_end[i];
                if (GET_CODE (insn) == JUMP_INSN)
                  mark_label_ref (PATTERN (insn), insn, 0);
              }
        }

      /* Now delete the code for any basic blocks that can't be reached.
         They can occur because jump_optimize does not recognize
         unreachable loops as unreachable.  */

      for (i = 0; i < n_basic_blocks; i++)
        if (!block_live[i])
          {
            insn = basic_block_head[i];
            while (1)
              {
                if (GET_CODE (insn) == BARRIER)
                  abort ();
                if (GET_CODE (insn) != NOTE)
                  {
                    PUT_CODE (insn, NOTE);
                    NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                    NOTE_SOURCE_FILE (insn) = 0;
                  }
                if (insn == basic_block_end[i])
                  {
                    /* BARRIERs are between basic blocks, not part of one.
                       Delete a BARRIER if the preceding jump is deleted.
                       We cannot alter a BARRIER into a NOTE
                       because it is too short; but we can really delete it
                       because it is not part of a basic block.  */
                    if (NEXT_INSN (insn) != 0
                        && GET_CODE (NEXT_INSN (insn)) == BARRIER)
                      delete_insn (NEXT_INSN (insn));
                    break;
                  }
                insn = NEXT_INSN (insn);
              }

            /* Each time we delete some basic blocks,
               see if there is a jump around them that is
               being turned into a no-op.  If so, delete it.  */

            if (block_live[i - 1])
              {
                register int j;
                for (j = i; j < n_basic_blocks; j++)
                  if (block_live[j])
                    {
                      rtx label;
                      insn = basic_block_end[i - 1];
                      if (GET_CODE (insn) == JUMP_INSN
                          /* An unconditional jump is the only possibility
                             we must check for, since a conditional one
                             would make these blocks live.  */
                          && simplejump_p (insn)
                          && (label = XEXP (SET_SRC (PATTERN (insn)), 0), 1)
                          && INSN_UID (label) != 0
                          && BLOCK_NUM (label) == j)
                        {
                          PUT_CODE (insn, NOTE);
                          NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
                          NOTE_SOURCE_FILE (insn) = 0;
                          if (GET_CODE (NEXT_INSN (insn)) != BARRIER)
                            abort ();
                          delete_insn (NEXT_INSN (insn));
                        }
                      break;
                    }
              }
          }
    }
}
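/* Illustrative sketch (editor's example, not part of flow.c): the two core
   steps of find_basic_blocks above -- start a new block at every label or
   right after anything that can transfer control, then mark reachable
   blocks until a whole pass changes nothing -- shown on a deliberately
   simplified instruction model.  Everything here (simple_insn,
   simple_find_blocks, simple_mark_reachable, the S_* kinds) is invented
   for this example; it ignores calls, barriers and nonlocal gotos.  */

enum simple_kind { S_LABEL, S_JUMP, S_COND_JUMP, S_OTHER };

struct simple_insn
{
  enum simple_kind kind;
  int target;   /* for S_JUMP / S_COND_JUMP: insn index of the target label */
  int block;    /* filled in by simple_find_blocks */
};

/* Record in HEAD[] the first insn of each block and, for every insn,
   which block it belongs to.  Returns the number of blocks.  */
static int
simple_find_blocks (struct simple_insn *insns, int n, int *head)
{
  int i, b = -1;
  enum simple_kind prev = S_JUMP;       /* force a block at insn 0 */

  for (i = 0; i < n; i++)
    {
      if (insns[i].kind == S_LABEL || prev == S_JUMP || prev == S_COND_JUMP)
        head[++b] = i;                  /* a new basic block starts here */
      insns[i].block = b;
      prev = insns[i].kind;
    }
  return b + 1;
}

/* Analogue of the block_live / block_marked fixed point: block 0 is live,
   each newly marked block makes its fall-through successor and its jump
   target live, and the loop stops when a pass marks nothing new.  */
static void
simple_mark_reachable (struct simple_insn *insns, int n,
                       int *head, int n_blocks, char *live, char *marked)
{
  int b, changed = 1;

  for (b = 0; b < n_blocks; b++)
    live[b] = marked[b] = 0;
  if (n_blocks > 0)
    live[0] = 1;                        /* the entry block is reachable */

  while (changed)
    {
      changed = 0;
      for (b = 0; b < n_blocks; b++)
        if (live[b] && !marked[b])
          {
            /* The last insn of block B is the one before the next head.  */
            int last = (b + 1 < n_blocks ? head[b + 1] : n) - 1;

            marked[b] = 1;
            changed = 1;
            /* Control drops into the next block unless B ends in an
               unconditional jump.  */
            if (insns[last].kind != S_JUMP && b + 1 < n_blocks)
              live[b + 1] = 1;
            /* A jump makes the block holding its target label live.  */
            if (insns[last].kind == S_JUMP || insns[last].kind == S_COND_JUMP)
              live[insns[insns[last].target].block] = 1;
          }
    }
}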
/* Check expression X for label references;
   if one is found, add INSN to the label's chain of references.

   CHECKDUP means check for and avoid creating duplicate references
   from the same insn.  Such duplicates do no serious harm but
   can slow life analysis.  CHECKDUP is set only when duplicates
   are likely.  */

static void
mark_label_ref (x, insn, checkdup)
     rtx x, insn;
     int checkdup;
{
  register RTX_CODE code;
  register int i;
  register char *fmt;

  /* We can be called with NULL when scanning label_value_list.  */
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (code == LABEL_REF)
    {
      register rtx label = XEXP (x, 0);
      register rtx y;
      if (GET_CODE (label) != CODE_LABEL)
        abort ();
      /* If the label was never emitted, this insn is junk,
         but avoid a crash trying to refer to BLOCK_NUM (label).
         This can happen as a result of a syntax error
         and a diagnostic has already been printed.  */
      if (INSN_UID (label) == 0)
        return;
      CONTAINING_INSN (x) = insn;
      /* if CHECKDUP is set, check for duplicate ref from same insn
         and don't insert.  */
      if (checkdup)
        for (y = LABEL_REFS (label); y != label; y = LABEL_NEXTREF (y))
          if (CONTAINING_INSN (y) == insn)
            return;
      LABEL_NEXTREF (x) = LABEL_REFS (label);
      LABEL_REFS (label) = x;
      block_live_static[BLOCK_NUM (label)] = 1;
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_ref (XEXP (x, i), insn, 0);
      if (fmt[i] == 'E')
        {
          register int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_label_ref (XVECEXP (x, i, j), insn, 1);
        }
    }
}
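/* Illustrative sketch (editor's example, not part of flow.c): the shape of
   mark_label_ref's recursion -- walk an expression tree and, whenever a
   label reference is found, push the reference onto that label's chain,
   optionally skipping duplicates from the same insn -- using an invented
   two-operand tree (simple_expr, simple_label, simple_ref) instead of
   GCC's rtx codes and format strings.  */

struct simple_label;

struct simple_ref
{
  struct simple_label *label;
  int insn_uid;                 /* which insn contains this reference */
  struct simple_ref *next;
};

struct simple_label
{
  struct simple_ref *refs;      /* chain of references to this label */
};

enum simple_expr_kind { E_LABEL_REF, E_BINARY };

struct simple_expr
{
  enum simple_expr_kind kind;
  struct simple_label *label;           /* for E_LABEL_REF */
  struct simple_expr *op0, *op1;        /* for E_BINARY */
};

/* Record every label referenced inside X as being used by insn INSN_UID,
   taking fresh simple_ref records from POOL.  CHECKDUP plays the same
   role as in mark_label_ref: skip the reference if this insn already has
   one to the same label.  */
static void
simple_mark_label_refs (struct simple_expr *x, int insn_uid, int checkdup,
                        struct simple_ref *pool, int *n_used)
{
  if (x == 0)
    return;

  if (x->kind == E_LABEL_REF)
    {
      struct simple_ref *r;

      if (checkdup)
        for (r = x->label->refs; r != 0; r = r->next)
          if (r->insn_uid == insn_uid)
            return;

      r = &pool[(*n_used)++];
      r->label = x->label;
      r->insn_uid = insn_uid;
      r->next = x->label->refs;         /* push onto the label's chain */
      x->label->refs = r;
      return;
    }

  /* Otherwise recurse into the operands, as the format-string loop does.  */
  simple_mark_label_refs (x->op0, insn_uid, checkdup, pool, n_used);
  simple_mark_label_refs (x->op1, insn_uid, checkdup, pool, n_used);
}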
/* Determine which registers are live at the start of each
   basic block of the function whose first insn is F.
   NREGS is the number of registers used in F.
   We allocate the vector basic_block_live_at_start
   and the regsets that it points to, and fill them with the data.
   regset_size and regset_bytes are also set here.  */

static void
life_analysis (f, nregs)
     rtx f;
     int nregs;
{
  register regset tem;
  int first_pass;
  int changed;
  /* For each basic block, a bitmask of regs
     live on exit from the block.  */
  regset *basic_block_live_at_end;
  /* For each basic block, a bitmask of regs
     live on entry to a successor-block of this block.
     If this does not match basic_block_live_at_end,
     that must be updated, and the block must be rescanned.  */
  regset *basic_block_new_live_at_end;
  /* For each basic block, a bitmask of regs
     whose liveness at the end of the basic block
     can make a difference in which regs are live on entry to the block.
     These are the regs that are set within the basic block,
     possibly excluding those that are used after they are set.  */
  regset *basic_block_significant;
  register int i;
  rtx insn;
  struct obstack flow_obstack;

  gcc_obstack_init (&flow_obstack);

  max_regno = nregs;

  bzero (regs_ever_live, sizeof regs_ever_live);

  /* Allocate and zero out many data structures
     that will record the data from lifetime analysis.  */

  allocate_for_life_analysis ();

  reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
  bzero (reg_next_use, nregs * sizeof (rtx));

  /* Set up several regset-vectors used internally within this function.
     Their meanings are documented above, with their declarations.  */

  basic_block_live_at_end
    = (regset *) alloca (n_basic_blocks * sizeof (regset));

  /* Don't use alloca since that leads to a crash rather than an error
     message if there isn't enough space.
     Don't use oballoc since we may need to allocate other things during
     this function on the temporary obstack.  */
  tem = (regset) obstack_alloc (&flow_obstack, n_basic_blocks * regset_bytes);
  bzero (tem, n_basic_blocks * regset_bytes);
  init_regset_vector (basic_block_live_at_end, tem,
                      n_basic_blocks, regset_bytes);

  basic_block_new_live_at_end
    = (regset *) alloca (n_basic_blocks * sizeof (regset));
  tem = (regset) obstack_alloc (&flow_obstack, n_basic_blocks * regset_bytes);
  bzero (tem, n_basic_blocks * regset_bytes);
  init_regset_vector (basic_block_new_live_at_end, tem,
                      n_basic_blocks, regset_bytes);

  basic_block_significant
    = (regset *) alloca (n_basic_blocks * sizeof (regset));
  tem = (regset) obstack_alloc (&flow_obstack, n_basic_blocks * regset_bytes);
  bzero (tem, n_basic_blocks * regset_bytes);
  init_regset_vector (basic_block_significant, tem,
                      n_basic_blocks, regset_bytes);
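#if 0
  /* Illustrative sketch (editor's example, guarded out and not part of
     flow.c): the allocation pattern used just above -- one contiguous
     chunk of n_basic_blocks * regset_bytes bytes is carved into
     n_basic_blocks per-block register sets by pointing each vector slot
     at its own slice.  Written with plain calloc (declared in <stdlib.h>)
     and invented names (example_regset, example_init_vector) rather than
     the flow obstack or GCC's init_regset_vector.  */

  typedef unsigned long *example_regset;

  /* Point VECTOR[0..NELTS-1] at consecutive BYTES_PER_ELT-byte slices of
     one zeroed allocation; the caller keeps the returned pointer so the
     whole vector can be freed at once.  */
  static char *
  example_init_vector (example_regset *vector, int nelts, int bytes_per_elt)
  {
    char *space = (char *) calloc (nelts, bytes_per_elt);
    int i;

    for (i = 0; i < nelts; i++)
      vector[i] = (example_regset) (space + i * bytes_per_elt);
    return space;
  }
#endif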
  /* Record which insns refer to any volatile memory
     or for any reason can't be deleted just because they are dead stores.
     Also, delete any insns that copy a register to itself.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      enum rtx_code code1 = GET_CODE (insn);
      if (code1 == CALL_INSN)
        INSN_VOLATILE (insn) = 1;
      else if (code1 == INSN || code1 == JUMP_INSN)
        {
          /* Delete (in effect) any obvious no-op moves.  */
          if (GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && GET_CODE (SET_SRC (PATTERN (insn))) == REG
              && REGNO (SET_DEST (PATTERN (insn)))
                 == REGNO (SET_SRC (PATTERN (insn)))
              /* Insns carrying these notes are useful later on.  */
              && ! find_reg_note (insn, REG_EQUAL, NULL_RTX))
            {
              PUT_CODE (insn, NOTE);
              NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
              NOTE_SOURCE_FILE (insn) = 0;
            }
          else if (GET_CODE (PATTERN (insn)) == PARALLEL)
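/* Illustrative sketch (editor's example, not part of flow.c): the fixed
   point that life_analysis' comments describe -- keep per-block "live at
   end" and "live at start" register sets and rescan blocks until nothing
   changes -- written as the textbook backward dataflow over small bit
   sets.  The block representation (struct live_block with explicit
   successor indices and precomputed use/def sets) and every name here are
   invented for the example; flow.c itself rescans the insns of a block
   whenever basic_block_new_live_at_end disagrees with
   basic_block_live_at_end.  */

#define EX_REGSET_WORDS 4       /* room for 4 * sizeof (long) * 8 registers */

struct live_block
{
  unsigned long use[EX_REGSET_WORDS];   /* regs read before any write */
  unsigned long def[EX_REGSET_WORDS];   /* regs written in the block */
  unsigned long live_in[EX_REGSET_WORDS];
  unsigned long live_out[EX_REGSET_WORDS];
  int succ[2];                          /* successor block indices, -1 = none */
};

/* Iterate live_out[B] = union of live_in over B's successors and
   live_in[B] = use[B] | (live_out[B] & ~def[B]) until no set changes.
   Assumes the live_in/live_out sets start out zeroed.  */
static void
example_life_analysis (struct live_block *blocks, int n_blocks)
{
  int changed = 1;

  while (changed)
    {
      int b, w, s;

      changed = 0;
      /* Walking the blocks backwards usually converges in fewer passes,
         since liveness flows from later blocks to earlier ones.  */
      for (b = n_blocks - 1; b >= 0; b--)
        for (w = 0; w < EX_REGSET_WORDS; w++)
          {
            unsigned long out = 0, new_in;

            for (s = 0; s < 2; s++)
              if (blocks[b].succ[s] >= 0)
                out |= blocks[blocks[b].succ[s]].live_in[w];
            blocks[b].live_out[w] = out;

            new_in = blocks[b].use[w]
                     | (blocks[b].live_out[w] & ~blocks[b].def[w]);
            if (new_in != blocks[b].live_in[w])
              {
                blocks[b].live_in[w] = new_in;
                changed = 1;
              }
          }
    }
}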