loop.c
字號(hào):
else { temp = find_reg_note (p, REG_EQUAL, NULL_RTX); if (temp && CONSTANT_P (XEXP (temp, 0))) m->set_src = XEXP (temp, 0), m->move_insn = 1; else m->move_insn = 0; } m->is_equiv = (find_reg_note (p, REG_EQUIV, NULL_RTX) != 0); } } /* If this register is always set within a STRICT_LOW_PART or set to zero, then its high bytes are constant. So clear them outside the loop and within the loop just load the low bytes. We must check that the machine has an instruction to do so. Also, if the value loaded into the register depends on the same register, this cannot be done. */ else if (SET_SRC (set) == const0_rtx && GET_CODE (NEXT_INSN (p)) == INSN && (set1 = single_set (NEXT_INSN (p))) && GET_CODE (set1) == SET && (GET_CODE (SET_DEST (set1)) == STRICT_LOW_PART) && (GET_CODE (XEXP (SET_DEST (set1), 0)) == SUBREG) && (SUBREG_REG (XEXP (SET_DEST (set1), 0)) == SET_DEST (set)) && !reg_mentioned_p (SET_DEST (set), SET_SRC (set1))) { register int regno = REGNO (SET_DEST (set)); if (n_times_set[regno] == 2) { register struct movable *m; m = (struct movable *) alloca (sizeof (struct movable)); m->next = 0; m->insn = p; m->set_dest = SET_DEST (set); m->dependencies = 0; m->force = 0; m->consec = 0; m->done = 0; m->forces = 0; m->move_insn = 0; m->partial = 1; /* If the insn may not be executed on some cycles, we can't clear the whole reg; clear just high part. Not even if the reg is used only within this loop. Consider this: while (1) while (s != t) { if (foo ()) x = *s; use (x); } Clearing x before the inner loop could clobber a value being saved from the last time around the outer loop. However, if the reg is not used outside this loop and all uses of the register are in the same basic block as the store, there is no problem. If this insn was made by loop, we don't know its INSN_LUID and hence must make a conservative assumption. 
*/ m->global = (INSN_UID (p) >= max_uid_for_loop || (uid_luid[regno_last_uid[regno]] > INSN_LUID (end)) || (uid_luid[regno_first_uid[regno]] < INSN_LUID (p)) || (labels_in_range_p (p, uid_luid[regno_first_uid[regno]]))); if (maybe_never && m->global) m->savemode = GET_MODE (SET_SRC (set1)); else m->savemode = VOIDmode; m->regno = regno; m->cond = 0; m->match = 0; m->lifetime = (uid_luid[regno_last_uid[regno]] - uid_luid[regno_first_uid[regno]]); m->savings = 1; n_times_set[regno] = -1; /* Add M to the end of the chain MOVABLES. */ if (movables == 0) movables = m; else last_movable->next = m; last_movable = m; } } } /* Past a call insn, we get to insns which might not be executed because the call might exit. This matters for insns that trap. Call insns inside a REG_LIBCALL/REG_RETVAL block always return, so they don't count. */ else if (GET_CODE (p) == CALL_INSN && ! in_libcall) call_passed = 1; /* Past a label or a jump, we get to insns for which we can't count on whether or how many times they will be executed during each iteration. Therefore, we can only move out sets of trivial variables (those not used after the loop). */ /* This code appears in three places, once in scan_loop, and twice in strength_reduce. */ else if ((GET_CODE (p) == CODE_LABEL || GET_CODE (p) == JUMP_INSN) /* If we enter the loop in the middle, and scan around to the beginning, don't set maybe_never for that. This must be an unconditional jump, otherwise the code at the top of the loop might never be executed. Unconditional jumps are followed a by barrier then loop end. */ && ! (GET_CODE (p) == JUMP_INSN && JUMP_LABEL (p) == loop_top && NEXT_INSN (NEXT_INSN (p)) == end && simplejump_p (p))) maybe_never = 1; else if (GET_CODE (p) == NOTE) { /* At the virtual top of a converted loop, insns are again known to be executed: logically, the loop begins here even though the exit code has been duplicated. 
*/ if (NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_VTOP && loop_depth == 0) maybe_never = call_passed = 0; else if (NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_BEG) loop_depth++; else if (NOTE_LINE_NUMBER (p) == NOTE_INSN_LOOP_END) loop_depth--; } } /* If one movable subsumes another, ignore that other. */ ignore_some_movables (movables); /* For each movable insn, see if the reg that it loads leads when it dies right into another conditionally movable insn. If so, record that the second insn "forces" the first one, since the second can be moved only if the first is. */ force_movables (movables); /* See if there are multiple movable insns that load the same value. If there are, make all but the first point at the first one through the `match' field, and add the priorities of them all together as the priority of the first. */ combine_movables (movables, nregs); /* Now consider each movable insn to decide whether it is worth moving. Store 0 in n_times_set for each reg that is moved. */ move_movables (movables, threshold, insn_count, loop_start, end, nregs); /* Now candidates that still are negative are those not moved. Change n_times_set to indicate that those are not actually invariant. */ for (i = 0; i < nregs; i++) if (n_times_set[i] < 0) n_times_set[i] = n_times_used[i]; if (flag_strength_reduce) strength_reduce (scan_start, end, loop_top, insn_count, loop_start, end);}/* Add elements to *OUTPUT to record all the pseudo-regs mentioned in IN_THIS but not mentioned in NOT_IN_THIS. */voidrecord_excess_regs (in_this, not_in_this, output) rtx in_this, not_in_this; rtx *output;{ enum rtx_code code; char *fmt; int i; code = GET_CODE (in_this); switch (code) { case PC: case CC0: case CONST_INT: case CONST_DOUBLE: case CONST: case SYMBOL_REF: case LABEL_REF: return; case REG: if (REGNO (in_this) >= FIRST_PSEUDO_REGISTER && ! 
reg_mentioned_p (in_this, not_in_this)) *output = gen_rtx (EXPR_LIST, VOIDmode, in_this, *output); return; } fmt = GET_RTX_FORMAT (code); for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--) { int j; switch (fmt[i]) { case 'E': for (j = 0; j < XVECLEN (in_this, i); j++) record_excess_regs (XVECEXP (in_this, i, j), not_in_this, output); break; case 'e': record_excess_regs (XEXP (in_this, i), not_in_this, output); break; } }}/* Check what regs are referred to in the libcall block ending with INSN, aside from those mentioned in the equivalent value. If there are none, return 0. If there are one or more, return an EXPR_LIST containing all of them. */static rtxlibcall_other_reg (insn, equiv) rtx insn, equiv;{ rtx note = find_reg_note (insn, REG_RETVAL, NULL_RTX); rtx p = XEXP (note, 0); rtx output = 0; /* First, find all the regs used in the libcall block that are not mentioned as inputs to the result. */ while (p != insn) { if (GET_CODE (p) == INSN || GET_CODE (p) == JUMP_INSN || GET_CODE (p) == CALL_INSN) record_excess_regs (PATTERN (p), equiv, &output); p = NEXT_INSN (p); } return output;}/* Return 1 if all uses of REG are between INSN and the end of the basic block. */static int reg_in_basic_block_p (insn, reg) rtx insn, reg;{ int regno = REGNO (reg); rtx p; if (regno_first_uid[regno] != INSN_UID (insn)) return 0; /* Search this basic block for the already recorded last use of the reg. */ for (p = insn; p; p = NEXT_INSN (p)) { switch (GET_CODE (p)) { case NOTE: break; case INSN: case CALL_INSN: /* Ordinary insn: if this is the last use, we win. */ if (regno_last_uid[regno] == INSN_UID (p)) return 1; break; case JUMP_INSN: /* Jump insn: if this is the last use, we win. */ if (regno_last_uid[regno] == INSN_UID (p)) return 1; /* Otherwise, it's the end of the basic block, so we lose. */ return 0; case CODE_LABEL: case BARRIER: /* It's the end of the basic block, so we lose. */ return 0; } } /* The "last use" doesn't follow the "first use"?? 
*/ abort ();}/* Compute the benefit of eliminating the insns in the block whose last insn is LAST. This may be a group of insns used to compute a value directly or can contain a library call. */static intlibcall_benefit (last) rtx last;{ rtx insn; int benefit = 0; for (insn = XEXP (find_reg_note (last, REG_RETVAL, NULL_RTX), 0); insn != last; insn = NEXT_INSN (insn)) { if (GET_CODE (insn) == CALL_INSN) benefit += 10; /* Assume at least this many insns in a library routine. */ else if (GET_CODE (insn) == INSN && GET_CODE (PATTERN (insn)) != USE && GET_CODE (PATTERN (insn)) != CLOBBER) benefit++; } return benefit;}/* Skip COUNT insns from INSN, counting library calls as 1 insn. */static rtxskip_consec_insns (insn, count) rtx insn; int count;{ for (; count > 0; count--) { rtx temp; /* If first insn of libcall sequence, skip to end. */ /* Do this at start of loop, since INSN is guaranteed to be an insn here. */ if (GET_CODE (insn) != NOTE && (temp = find_reg_note (insn, REG_LIBCALL, NULL_RTX))) insn = XEXP (temp, 0); do insn = NEXT_INSN (insn); while (GET_CODE (insn) == NOTE); } return insn;}/* Ignore any movable whose insn falls within a libcall which is part of another movable. We make use of the fact that the movable for the libcall value was made later and so appears later on the chain. */static voidignore_some_movables (movables) struct movable *movables;{ register struct movable *m, *m1; for (m = movables; m; m = m->next) { /* Is this a movable for the value of a libcall? */ rtx note = find_reg_note (m->insn, REG_RETVAL, NULL_RTX); if (note) { rtx insn; /* Check for earlier movables inside that range, and mark them invalid. We cannot use LUIDs here because insns created by loop.c for prior loops don't have LUIDs. Rather than reject all such insns from movables, we just explicitly check each insn in the libcall (since invariant libcalls aren't that common). 
*/ for (insn = XEXP (note, 0); insn != m->insn; insn = NEXT_INSN (insn)) for (m1 = movables; m1 != m; m1 = m1->next) if (m1->insn == insn) m1->done = 1; } }} /* For each movable insn, see if the reg that it loads leads when it dies right into another conditionally movable insn. If so, record that the second insn "forces" the first one, since the second can be moved only if the first is. */static voidforce_movables (movables) struct movable *movables;{ register struct movable *m, *m1; for (m1 = movables; m1; m1 = m1->next) /* Omit this if moving just the (SET (REG) 0) of a zero-extend. */ if (!m1->partial && !m1->done) { int regno = m1->regno; for (m = m1->next; m; m = m->next) /* ??? Could this be a bug? What if CSE caused the register of M1 to be used after this insn? Since CSE does not update regno_last_uid, this insn M->insn might not be where it dies. But very likely this doesn't matter; what matters is that M's reg is computed from M1's reg. */ if (INSN_UID (m->insn) == regno_last_uid[regno] && !m->done) break; if (m != 0 && m->set_src == m1->set_dest /* If m->consec, m->set_src isn't valid. */ && m->consec == 0) m = 0; /* Increase the priority of the moving the first insn since it permits the second to be moved as well. */ if (m != 0) { m->forces = m1; m1->lifetime += m->lifetime; m1->savings += m1->savings; } }}
?? 快捷鍵說明
復(fù)制代碼
Ctrl + C
搜索代碼
Ctrl + F
全屏模式
F11
切換主題
Ctrl + Shift + D
顯示快捷鍵
?
增大字號(hào)
Ctrl + =
減小字號(hào)
Ctrl + -