reload1.c
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
  {
    spill_reg_order[i] = -1;
    forbidden_regs[i] = -1;
  }

if (caller_save_needed)
  frame_pointer_needed = 1;

if (frame_pointer_needed)
  {
    forbidden_regs[FRAME_POINTER_REGNUM] = 1;
    spill_hard_reg (FRAME_POINTER_REGNUM, global, dumpfile);
  }

if (global)
  {
    basic_block_needs = (char *) alloca (n_basic_blocks);
    bzero (basic_block_needs, n_basic_blocks);
  }

/* This loop scans the entire function each go-round
   and repeats until one repetition spills no additional hard regs.  */

/* This flag is set when a pseudo reg is spilled,
   to require another pass.  Note that getting an additional reload reg
   does not necessarily imply any pseudo reg was spilled;
   sometimes we find a reload reg that no pseudo reg was allocated in.  */
something_changed = 1;
/* This flag is set if there are any insns that require reloading.  */
something_needs_reloads = 0;
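/* For instance (hypothetical trace): one pass may find an insn that needs
   one more GENERAL_REGS register than is available and spill a pseudo to
   free a hard reg; the spilled pseudo's references then need reloads of
   their own, so the whole function is scanned again, and the loop below
   exits only when a complete pass spills nothing further.  */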
while (something_changed)
  {
    /* For each class, number of reload regs needed in that class.
       This is the maximum over all insns of the needs in that class
       of the individual insn.  */
    int max_needs[N_REG_CLASSES];
    /* For each class, size of group of consecutive regs
       that is needed for the reloads of this class.  */
    int group_size[N_REG_CLASSES];
    /* For each class, max number of consecutive groups needed.
       (Each group contains group_size[CLASS] consecutive registers.)  */
    int max_groups[N_REG_CLASSES];
    /* For each class, max number needed of regs
       that don't belong to any of the groups.  */
    int max_nongroups[N_REG_CLASSES];
    /* For each class, the machine mode which requires consecutive
       groups of regs of that class.
       If two different modes ever require groups of one class,
       they must be the same size and equally restrictive for that class,
       otherwise we can't handle the complexity.  */
    enum machine_mode group_mode[N_REG_CLASSES];

    something_changed = 0;
    bzero (max_needs, sizeof max_needs);
    bzero (max_groups, sizeof max_groups);
    bzero (max_nongroups, sizeof max_nongroups);
    bzero (group_size, sizeof group_size);
    for (i = 0; i < N_REG_CLASSES; i++)
      group_mode[i] = VOIDmode;

    /* Keep track of which basic blocks are needing the reloads.  */
    this_block = 0;

    /* Remember whether any element of basic_block_needs
       changes from 0 to 1 in this pass.  */
    new_basic_block_needs = 0;

    /* Compute the most additional registers needed by any instruction.
       Collect information separately for each class of regs.  */

    for (insn = first; insn; insn = NEXT_INSN (insn))
      {
        rtx after_call = 0;

        if (global && this_block + 1 < n_basic_blocks
            && insn == basic_block_head[this_block + 1])
          ++this_block;

        if (GET_CODE (insn) == INSN || GET_CODE (insn) == JUMP_INSN
            || GET_CODE (insn) == CALL_INSN)
          {
            /* Nonzero means don't use a reload reg that overlaps
               the place where a function value can be returned.  */
            rtx avoid_return_reg = 0;

            /* Initially, count RELOAD_OTHER reloads.
               Later, merge in the other kinds.  */
            int insn_needs[N_REG_CLASSES];
            int insn_groups[N_REG_CLASSES];
            int insn_total_groups = 0;

            /* Count RELOAD_FOR_INPUT_RELOAD_ADDRESS reloads.  */
            int insn_needs_for_inputs[N_REG_CLASSES];
            int insn_groups_for_inputs[N_REG_CLASSES];
            int insn_total_groups_for_inputs = 0;

            /* Count RELOAD_FOR_OUTPUT_RELOAD_ADDRESS reloads.  */
            int insn_needs_for_outputs[N_REG_CLASSES];
            int insn_groups_for_outputs[N_REG_CLASSES];
            int insn_total_groups_for_outputs = 0;

            /* Count RELOAD_FOR_OPERAND_ADDRESS reloads.  */
            int insn_needs_for_operands[N_REG_CLASSES];
            int insn_groups_for_operands[N_REG_CLASSES];
            int insn_total_groups_for_operands = 0;

            for (i = 0; i < N_REG_CLASSES; i++)
              {
                insn_needs[i] = 0, insn_groups[i] = 0;
                insn_needs_for_inputs[i] = 0, insn_groups_for_inputs[i] = 0;
                insn_needs_for_outputs[i] = 0, insn_groups_for_outputs[i] = 0;
                insn_needs_for_operands[i] = 0, insn_groups_for_operands[i] = 0;
              }

#if 0
            /* This wouldn't work nowadays, since optimize_bit_field
               looks for non-strict memory addresses.  */
            /* Optimization: a bit-field instruction whose field
               happens to be a byte or halfword in memory
               can be changed to a move instruction.  */
            if (GET_CODE (PATTERN (insn)) == SET)
              {
                rtx dest = SET_DEST (PATTERN (insn));
                rtx src = SET_SRC (PATTERN (insn));

                if (GET_CODE (dest) == ZERO_EXTRACT
                    || GET_CODE (dest) == SIGN_EXTRACT)
                  optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
                if (GET_CODE (src) == ZERO_EXTRACT
                    || GET_CODE (src) == SIGN_EXTRACT)
                  optimize_bit_field (PATTERN (insn), insn, reg_equiv_mem);
              }
#endif

            /* Set avoid_return_reg if this is an insn
               that might use the value of a function call.  */
            if (GET_CODE (insn) == CALL_INSN)
              {
                if (GET_CODE (PATTERN (insn)) == SET)
                  after_call = SET_DEST (PATTERN (insn));
                else if (GET_CODE (PATTERN (insn)) == PARALLEL
                         && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
                  after_call = SET_DEST (XVECEXP (PATTERN (insn), 0, 0));
                else
                  after_call = 0;
              }
            else if (after_call != 0
                     && !(GET_CODE (PATTERN (insn)) == SET
                          && SET_DEST (PATTERN (insn)) == stack_pointer_rtx))
              {
                if (reg_mentioned_p (after_call, PATTERN (insn)))
                  avoid_return_reg = after_call;
                after_call = 0;
              }

            /* Analyze the instruction.  */
            find_reloads (insn, 0, spill_indirect_ok, global, spill_reg_order);

            if (n_reloads == 0)
              continue;

            something_needs_reloads = 1;

            /* Count each reload once in every class
               containing the reload's own class.  */

            for (i = 0; i < n_reloads; i++)
              {
                register enum reg_class *p;
                int size;
                enum machine_mode mode;
                int *this_groups;
                int *this_needs;
                int *this_total_groups;

                /* Don't use dummy reloads in regs
                   being spilled in this block.  */
                if (reload_reg_rtx[i] != 0
                    && (!global || basic_block_needs[this_block])
                    && spill_reg_order[REGNO (reload_reg_rtx[i])] >= 0)
                  reload_reg_rtx[i] = 0;

                /* Don't count the dummy reloads, for which one of the
                   regs mentioned in the insn can be used for reloading.
                   Don't count optional reloads.
                   Don't count reloads that got combined with others.  */
                if (reload_reg_rtx[i] != 0
                    || reload_optional[i] != 0
                    || (reload_out[i] == 0 && reload_in[i] == 0))
                  continue;

                /* Decide which time-of-use to count this reload for.  */
                switch (reload_when_needed[i])
                  {
                  case RELOAD_OTHER:
                    this_needs = insn_needs;
                    this_groups = insn_groups;
                    this_total_groups = &insn_total_groups;
                    break;

                  case RELOAD_FOR_INPUT_RELOAD_ADDRESS:
                    this_needs = insn_needs_for_inputs;
                    this_groups = insn_groups_for_inputs;
                    this_total_groups = &insn_total_groups_for_inputs;
                    break;

                  case RELOAD_FOR_OUTPUT_RELOAD_ADDRESS:
                    this_needs = insn_needs_for_outputs;
                    this_groups = insn_groups_for_outputs;
                    this_total_groups = &insn_total_groups_for_outputs;
                    break;

                  case RELOAD_FOR_OPERAND_ADDRESS:
                    this_needs = insn_needs_for_operands;
                    this_groups = insn_groups_for_operands;
                    this_total_groups = &insn_total_groups_for_operands;
                    break;
                  }

                mode = reload_inmode[i];
                if (GET_MODE_SIZE (reload_outmode[i]) > GET_MODE_SIZE (mode))
                  mode = reload_outmode[i];
                size = CLASS_MAX_NREGS (reload_reg_class[i], mode);
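                /* Worked example (illustrative, not from any particular
                   target): with 32-bit hard registers, an SImode reload
                   gives size == 1 and is counted as an individual need
                   below, while a DImode reload gives size == 2 and is
                   counted as a group of two consecutive registers.  */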
                if (size > 1)
                  {
                    /* Count number of groups needed separately from
                       number of individual regs needed.  */
                    this_groups[(int) reload_reg_class[i]]++;
                    p = reg_class_superclasses[(int) reload_reg_class[i]];
                    while (*p != LIM_REG_CLASSES)
                      this_groups[(int) *p++]++;
                    (*this_total_groups)++;

                    /* If a group of consecutive regs are needed,
                       record which machine mode needs them.
                       Crash if two dissimilar machine modes both need
                       groups of consecutive regs of the same class.  */
                    if (group_mode[(int) reload_reg_class[i]] != VOIDmode
                        && (! modes_equiv_for_class_p
                               (group_mode[(int) reload_reg_class[i]],
                                mode, reload_reg_class[i])
                            || group_size[(int) reload_reg_class[i]] != size))
                      abort ();

                    /* Record size and mode of a group of this class.  */
                    group_size[(int) reload_reg_class[i]] = size;
                    group_mode[(int) reload_reg_class[i]] = mode;
                  }
                else if (size == 1)
                  {
                    this_needs[(int) reload_reg_class[i]] += 1;
                    p = reg_class_superclasses[(int) reload_reg_class[i]];
                    while (*p != LIM_REG_CLASSES)
                      this_needs[(int) *p++] += 1;
                  }
                else
                  abort ();

                if (global)
                  {
                    if (! basic_block_needs[this_block])
                      new_basic_block_needs = 1;
                    basic_block_needs[this_block] = 1;
                  }
              }

            /* All reloads have been counted for this insn;
               now merge the various times of use.
               This sets insn_needs, etc., to the maximum total number
               of registers needed at any point in this insn.  */

            for (i = 0; i < N_REG_CLASSES; i++)
              {
                int this_max;

                this_max = insn_needs_for_inputs[i];
                if (insn_needs_for_outputs[i] > this_max)
                  this_max = insn_needs_for_outputs[i];
                if (insn_needs_for_operands[i] > this_max)
                  this_max = insn_needs_for_operands[i];
                insn_needs[i] += this_max;

                this_max = insn_groups_for_inputs[i];
                if (insn_groups_for_outputs[i] > this_max)
                  this_max = insn_groups_for_outputs[i];
                if (insn_groups_for_operands[i] > this_max)
                  this_max = insn_groups_for_operands[i];
                insn_groups[i] += this_max;
              }

            insn_total_groups += max (insn_total_groups_for_inputs,
                                      max (insn_total_groups_for_outputs,
                                           insn_total_groups_for_operands));

            /* Remember for later shortcuts which insns had any reloads.  */
            PUT_MODE (insn, n_reloads ? QImode : VOIDmode);

            /* If this insn stores the value of a function call,
               and that value is in a register that has been spilled,
               and if the insn needs a reload in a class
               that might use that register as the reload register,
               then add an extra need in that class.
               This makes sure we have a register available
               that does not overlap the return value.  */
            if (avoid_return_reg)
              {
                int regno = REGNO (avoid_return_reg);
                int nregs
                  = HARD_REGNO_NREGS (regno, GET_MODE (avoid_return_reg));
                int r;
                /* Class, if any, whose group need should also grow.  */
                int inc_groups = -1;

                for (r = regno; r < regno + nregs; r++)
                  if (spill_reg_order[r] >= 0)
                    for (i = 0; i < N_REG_CLASSES; i++)
                      if (TEST_HARD_REG_BIT (reg_class_contents[i], r))
                        {
                          if (insn_needs[i] > 0)
                            insn_needs[i]++;
                          if (insn_groups[i] > 0 && nregs > 1)
                            inc_groups = i;
                        }
                if (inc_groups >= 0)
                  insn_groups[inc_groups]++;
              }

            /* For each class, collect maximum need of any insn.  */

            for (i = 0; i < N_REG_CLASSES; i++)
              {
                if (max_needs[i] < insn_needs[i])
                  max_needs[i] = insn_needs[i];
                if (max_groups[i] < insn_groups[i])
                  max_groups[i] = insn_groups[i];
                if (insn_total_groups > 0)
                  if (max_nongroups[i] < insn_needs[i])
                    max_nongroups[i] = insn_needs[i];
              }
          }
        /* Note that there is a continue statement above.  */
      }

    /* Now deduct from the needs for the registers already
       available (already spilled).  */

    bzero (counted_for_groups, sizeof counted_for_groups);
    bzero (counted_for_nongroups, sizeof counted_for_nongroups);

    /* Find all consecutive groups of spilled registers
       and mark each group off against the need for such groups.  */
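    /* For example (illustrative numbers): if group_size[i] == 2 and the
       spill regs in class I are hard regs 4, 5 and 6, the scan below
       pairs off {4, 5}, decrements max_groups[i] and each superclass's
       count, and leaves reg 6 to count against the not-in-group need.  */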
    for (i = 0; i < N_REG_CLASSES; i++)
      if (group_size[i] > 1)
        {
          char regmask[FIRST_PSEUDO_REGISTER];
          int j;

          bzero (regmask, sizeof regmask);
          /* Make a mask of all the regs that are spill regs in class I.  */
          for (j = 0; j < n_spills; j++)
            if (TEST_HARD_REG_BIT (reg_class_contents[i], spill_regs[j])
                && !counted_for_groups[spill_regs[j]])
              regmask[spill_regs[j]] = 1;
          /* Find each consecutive group of them.  */
          for (j = 0; j < FIRST_PSEUDO_REGISTER && max_groups[i] > 0; j++)
            if (regmask[j] && j + group_size[i] <= FIRST_PSEUDO_REGISTER
                /* Next line in case group-mode for this class
                   demands an even-odd pair.  */
                && HARD_REGNO_MODE_OK (j, group_mode[i]))
              {
                int k;
                for (k = 1; k < group_size[i]; k++)
                  if (! regmask[j + k])
                    break;
                if (k == group_size[i])
                  {
                    /* We found a group.  Mark it off against this class's
                       need for groups, and against each superclass too.  */
                    register enum reg_class *p;

                    max_groups[i]--;
                    p = reg_class_superclasses[i];
                    while (*p != LIM_REG_CLASSES)
                      max_groups[(int) *p++]--;
                    /* Don't count these registers again.  */
                    counted_for_groups[j] = 1;
                    for (k = 1; k < group_size[i]; k++)
                      counted_for_groups[j + k] = 1;
                  }
                j += k;
              }
        }

    /* Now count all remaining spill regs against the individual need.
       Those that weren't counted in groups can also count
       against the not-in-group need.  */

    for (i = 0; i < n_spills; i++)
      {
        register enum reg_class *p;

        class = (int) REGNO_REG_CLASS (spill_regs[i]);
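One subtlety worth illustrating is the merging step above ("now merge the
various times of use"): reloads that fire at different times within a single
insn can share reload registers, so the per-class total is the RELOAD_OTHER
count plus the maximum over the three address-reload kinds, not their sum.
The following is a minimal, self-contained sketch of just that rule; it is
not GCC code, and the class count and all values are invented for
illustration.

/* Sketch of the needs-merging rule: total need per class
   = RELOAD_OTHER count + max over the three times of use.  */

#include <stdio.h>

#define N_CLASSES 3	/* pretend there are three register classes */

static int
max3 (int a, int b, int c)
{
  int m = a > b ? a : b;
  return m > c ? m : c;
}

int
main (void)
{
  /* Hypothetical per-class counts for a single insn.  */
  int other[N_CLASSES]    = {1, 0, 0};	/* RELOAD_OTHER reloads */
  int inputs[N_CLASSES]   = {2, 1, 0};	/* input-address reloads */
  int outputs[N_CLASSES]  = {1, 0, 0};	/* output-address reloads */
  int operands[N_CLASSES] = {0, 2, 0};	/* operand-address reloads */
  int i;

  for (i = 0; i < N_CLASSES; i++)
    {
      int total = other[i] + max3 (inputs[i], outputs[i], operands[i]);
      printf ("class %d: %d reload reg(s) needed\n", i, total);
    }
  /* Prints 3, 2 and 0.  Class 0 can reuse the same registers for its
     input-address and output-address reloads, so it needs 1 + 2 = 3
     rather than the naive sum 1 + 2 + 1 + 0 = 4.  */
  return 0;
}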