combine.c
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
	{
	  int c0 = INTVAL (XEXP (x, 1));
	  int c1 = INTVAL (XEXP (XEXP (x, 0), 1));
	  int combined = (code == IOR ? c0 | c1 : c0 ^ c1);

	  if (combined == 0)
	    return XEXP (XEXP (x, 0), 0);
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  SUBST (XEXP (x, 1), gen_rtx (CONST_INT, VOIDmode, combined));
	  SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
	  break;
	}

    case FLOAT:
      /* (float (sign_extend <X>)) = (float <X>).  */
      if (was_replaced[0]
	  && GET_CODE (to) == SIGN_EXTEND)
	SUBST (XEXP (x, 0), XEXP (to, 0));
      break;

    case ZERO_EXTRACT:
      /* (ZERO_EXTRACT (TRUNCATE x)...)
	 can become (ZERO_EXTRACT x ...).  */
      if (was_replaced[0]
	  && GET_CODE (to) == TRUNCATE)
	{
#ifdef BITS_BIG_ENDIAN
	  if (GET_CODE (XEXP (x, 2)) == CONST_INT)
	    {
	      if (!undobuf.storage)
		undobuf.storage = (char *) oballoc (0);
	      /* On a big-endian machine, must increment the bit-number
		 since the sign bit is farther away in the pre-truncated value.  */
	      return gen_rtx (ZERO_EXTRACT, GET_MODE (x),
			      XEXP (to, 0),
			      XEXP (x, 1),
			      gen_rtx (CONST_INT, VOIDmode,
				       (INTVAL (XEXP (x, 2))
					+ GET_MODE_BITSIZE (GET_MODE (XEXP (to, 0)))
					- GET_MODE_BITSIZE (GET_MODE (to)))));
	    }
#else
	  SUBST (XEXP (x, 0), XEXP (to, 0));
#endif
	}
      /* Extracting a single bit from the result of a shift:
	 see which bit it was before the shift and extract that directly.  */
      if (was_replaced[0]
	  && (GET_CODE (to) == ASHIFTRT || GET_CODE (to) == LSHIFTRT
	      || GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
	  && GET_CODE (XEXP (to, 1)) == CONST_INT
	  && XEXP (x, 1) == const1_rtx
	  && GET_CODE (XEXP (x, 2)) == CONST_INT)
	{
	  int shift = INTVAL (XEXP (to, 1));
	  int newpos;

	  if (GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
	    shift = - shift;
#ifdef BITS_BIG_ENDIAN
	  shift = - shift;
#endif
	  newpos = INTVAL (XEXP (x, 2)) + shift;
	  if (newpos >= 0
	      && newpos < GET_MODE_BITSIZE (GET_MODE (to)))
	    {
	      if (!undobuf.storage)
		undobuf.storage = (char *) oballoc (0);
	      return gen_rtx (ZERO_EXTRACT, GET_MODE (x),
			      XEXP (to, 0), const1_rtx,
			      gen_rtx (CONST_INT, VOIDmode, newpos));
	    }
	}
      break;

    case LSHIFTRT:
    case ASHIFTRT:
    case ROTATE:
    case ROTATERT:
#ifdef SHIFT_COUNT_TRUNCATED
      /* (lshift <X> (sign_extend <Y>)) = (lshift <X> <Y>) (most machines).
	 True for all kinds of shifts and also for zero_extend.  */
      if (was_replaced[1]
	  && (GET_CODE (to) == SIGN_EXTEND
	      || GET_CODE (to) == ZERO_EXTEND)
	  && FAKE_EXTEND_SAFE_P (GET_MODE (to), XEXP (to, 0)))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  SUBST (XEXP (x, 1),
		 /* This is a perverse SUBREG, wider than its base.  */
		 gen_lowpart_for_combine (GET_MODE (to), XEXP (to, 0)));
	}
#endif
      /* Two shifts in a row of same kind in same direction
	 with constant counts may be combined.  */
      if (was_replaced[0]
	  && GET_CODE (to) == GET_CODE (x)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (to, 1)) == CONST_INT
	  && INTVAL (XEXP (to, 1)) > 0
	  && INTVAL (XEXP (x, 1)) > 0
	  && (INTVAL (XEXP (x, 1)) + INTVAL (XEXP (to, 1))
	      < GET_MODE_BITSIZE (GET_MODE (x))))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  return gen_rtx (GET_CODE (x), GET_MODE (x),
			  XEXP (to, 0),
			  gen_rtx (CONST_INT, VOIDmode,
				   INTVAL (XEXP (x, 1))
				   + INTVAL (XEXP (to, 1))));
	}
      break;
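      /* A concrete instance of the two-shift rule above: in SImode,
	 (lshiftrt:SI (lshiftrt:SI X 3) 2) moves each bit of X down
	 3 places and then 2 more, so it equals (lshiftrt:SI X 5).
	 The test that the two counts total less than the mode's bitsize
	 rejects combined counts that a machine need not handle.  */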
    case LSHIFT:
    case ASHIFT:
#ifdef SHIFT_COUNT_TRUNCATED
      /* (lshift <X> (sign_extend <Y>)) = (lshift <X> <Y>) (most machines).
	 True for all kinds of shifts and also for zero_extend.  */
      if (was_replaced[1]
	  && (GET_CODE (to) == SIGN_EXTEND
	      || GET_CODE (to) == ZERO_EXTEND)
	  && FAKE_EXTEND_SAFE_P (GET_MODE (to), XEXP (to, 0)))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  SUBST (XEXP (x, 1),
		 /* This is a perverse SUBREG, wider than its base.  */
		 gen_lowpart_for_combine (GET_MODE (to), XEXP (to, 0)));
	}
#endif
      /* (lshift (and (lshiftrt <foo> <X>) <Y>) <X>)
	 happens copying between bit fields in similar structures.
	 It can be replaced by one `and' instruction.
	 It does not matter whether the shifts are logical or arithmetic.  */
      if (GET_CODE (XEXP (x, 0)) == AND
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0
	  && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
	  && XEXP (XEXP (x, 0), 0) == to
	  && (GET_CODE (to) == LSHIFTRT
	      || GET_CODE (to) == ASHIFTRT)
#if 0
/* I now believe this restriction is unnecessary.
   The outer shift will discard those bits in any case, right?  */
	  /* If inner shift is arithmetic, either it shifts left or
	     the bits it shifts the sign into are zeroed by the and.  */
	  && (INTVAL (XEXP (x, 1)) < 0
	      || ((unsigned) INTVAL (XEXP (XEXP (x, 0), 1))
		  < 1 << (GET_MODE_BITSIZE (GET_MODE (x))
			  - INTVAL (XEXP (x, 0)))))
#endif
	  && GET_CODE (XEXP (to, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) == INTVAL (XEXP (to, 1)))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  /* The constant in the new `and' is <Y> << <X>
	     but clear out all bits that don't belong in our mode.  */
	  return gen_rtx (AND, GET_MODE (x), XEXP (to, 0),
			  gen_rtx (CONST_INT, VOIDmode,
				   (GET_MODE_MASK (GET_MODE (x))
				    & ((GET_MODE_MASK (GET_MODE (x))
					& INTVAL (XEXP (XEXP (x, 0), 1)))
				       << INTVAL (XEXP (x, 1))))));
	}
      /* Two shifts in a row in same direction with constant counts
	 may be combined.  */
      if (was_replaced[0]
	  && (GET_CODE (to) == ASHIFT || GET_CODE (to) == LSHIFT)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (to, 1)) == CONST_INT
	  && INTVAL (XEXP (to, 1)) > 0
	  && INTVAL (XEXP (x, 1)) > 0
	  && (INTVAL (XEXP (x, 1)) + INTVAL (XEXP (to, 1))
	      < GET_MODE_BITSIZE (GET_MODE (x))))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  return gen_rtx (GET_CODE (x), GET_MODE (x),
			  XEXP (to, 0),
			  gen_rtx (CONST_INT, VOIDmode,
				   INTVAL (XEXP (x, 1))
				   + INTVAL (XEXP (to, 1))));
	}
      /* (ashift (ashiftrt <foo> <X>) <X>)
	 (or, on some machines, (ashift (ashift <foo> <-X>) <X>) instead)
	 happens if you divide by 2**N and then multiply by 2**N.
	 It can be replaced by one `and' instruction.
	 It does not matter whether the shifts are logical or arithmetic.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0
	  && was_replaced[0]
	  && (((GET_CODE (to) == LSHIFTRT || GET_CODE (to) == ASHIFTRT)
	       && GET_CODE (XEXP (to, 1)) == CONST_INT
	       && INTVAL (XEXP (x, 1)) == INTVAL (XEXP (to, 1)))
	      || ((GET_CODE (to) == LSHIFT || GET_CODE (to) == ASHIFT)
		  && GET_CODE (XEXP (to, 1)) == CONST_INT
		  && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (to, 1)))))
	{
	  if (!undobuf.storage)
	    undobuf.storage = (char *) oballoc (0);
	  /* The constant in the new `and' is -1 << <X>
	     but clear out all bits that don't belong in our mode.  */
	  return gen_rtx (AND, GET_MODE (x), XEXP (to, 0),
			  gen_rtx (CONST_INT, VOIDmode,
				   (GET_MODE_MASK (GET_MODE (x))
				    & (GET_MODE_MASK (GET_MODE (x))
				       << INTVAL (XEXP (x, 1))))));
	}
    }

  return x;
}
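/* A concrete instance of the final case of subst, above: dividing by
   2**3 and then multiplying by 2**3 in SImode produces
   (ashift:SI (ashiftrt:SI X 3) 3), which merely clears the low three
   bits of X, so it is rewritten as (and:SI X 0xfffffff8), where
   0xfffffff8 is -1 << 3 masked down to the 32 bits of SImode.  */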
/* This is the AND case of the function subst.  */

static rtx
simplify_and_const_int (x, to)
     rtx x, to;
{
  register rtx varop = XEXP (x, 0);
  register int constop = INTVAL (XEXP (x, 1));

  /* (and (subreg (and <foo> <constant>) 0) <constant>)
     results from an andsi followed by an andqi,
     which happens frequently when storing bit-fields
     on something whose result comes from an andsi.  */
  if (GET_CODE (varop) == SUBREG
      && XEXP (varop, 0) == to
      && subreg_lowpart_p (varop)
      && GET_CODE (to) == AND
      && GET_CODE (XEXP (to, 1)) == CONST_INT
      /* Verify that the result of the outer `and'
	 is not affected by any bits not defined in the inner `and'.
	 True if the outer mode is narrower, or if the outer constant
	 masks to zero all the bits that the inner mode doesn't have.  */
      && (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (GET_MODE (to))
	  || (constop & ~ GET_MODE_MASK (GET_MODE (to))) == 0))
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
		      gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
		      gen_rtx (CONST_INT, VOIDmode,
			       constop
			       /* Remember that the bits outside that mode
				  are not being changed, so the effect
				  is as if they were all 1.  */
			       & INTVAL (XEXP (to, 1))));
    }

  /* (and:SI (zero_extract:SI ...) <constant>)
     results from an andsi following a byte-fetch on risc machines.
     When the constant includes all bits extracted, eliminate the `and'.  */
  if (GET_CODE (varop) == ZERO_EXTRACT
      && GET_CODE (XEXP (varop, 1)) == CONST_INT
      /* The `and' must not clear any bits that the extract can give.  */
      && (~ constop & ((1 << INTVAL (XEXP (varop, 1))) - 1)) == 0)
    return varop;

  /* (and (zero_extend <foo>) <constant>)
     often results from storing in a bit-field something
     that was calculated as a short.  Replace with a single `and'
     in whose constant all bits not in <foo>'s mode are zero.  */
  if (varop == to
      && GET_CODE (to) == ZERO_EXTEND
      && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0)))
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
		      /* This is a perverse SUBREG, wider than its base.  */
		      gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
		      gen_rtx (CONST_INT, VOIDmode,
			       constop & GET_MODE_MASK (GET_MODE (XEXP (to, 0)))));
    }

  /* (and (sign_extend <foo>) <constant>)
     can be replaced with (and (subreg <foo>) <constant>)
     if <constant> is narrower than <foo>'s mode,
     or with (zero_extend <foo>) if <constant> is a mask for that mode.  */
  if (varop == to
      && GET_CODE (to) == SIGN_EXTEND
      && ((unsigned) constop <= GET_MODE_MASK (GET_MODE (XEXP (to, 0))))
      && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0)))
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      if (constop == GET_MODE_MASK (GET_MODE (XEXP (to, 0))))
	return gen_rtx (ZERO_EXTEND, GET_MODE (x), XEXP (to, 0));
      return gen_rtx (AND, GET_MODE (x),
		      /* This is a perverse SUBREG, wider than its base.  */
		      gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)),
		      XEXP (x, 1));
    }

  /* (and (and <foo> <constant>) <constant>)
     comes from two `and' instructions in a row.  */
  if (varop == to
      && GET_CODE (to) == AND
      && GET_CODE (XEXP (to, 1)) == CONST_INT)
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (AND, GET_MODE (x),
		      XEXP (to, 0),
		      gen_rtx (CONST_INT, VOIDmode,
			       constop & INTVAL (XEXP (to, 1))));
    }
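  /* A concrete instance of the rule just above:
     (and:SI (and:SI X 0xff) 0x3c) becomes (and:SI X 0x3c),
     since 0xff & 0x3c is 0x3c; two mask instructions become one.  */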
  /* (and (ashiftrt (ashift FOO N) N) CONST)
     may be simplified to (and FOO CONST)
     if CONST masks off the bits changed by the two shifts.  */
  if (GET_CODE (varop) == ASHIFTRT
      && GET_CODE (XEXP (varop, 1)) == CONST_INT
      && XEXP (varop, 0) == to
      && GET_CODE (to) == ASHIFT
      && GET_CODE (XEXP (to, 1)) == CONST_INT
      && INTVAL (XEXP (varop, 1)) == INTVAL (XEXP (to, 1))
      && ((unsigned) constop >> INTVAL (XEXP (varop, 1))) == 0)
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      /* If CONST is a mask for the low byte,
	 change this into a zero-extend instruction
	 from just the low byte of FOO.  */
      if (constop == GET_MODE_MASK (QImode))
	{
	  rtx temp = gen_lowpart_for_combine (QImode, XEXP (to, 0));
	  if (GET_CODE (temp) != CLOBBER)
	    return gen_rtx (ZERO_EXTEND, GET_MODE (x), temp);
	}
      return gen_rtx (AND, GET_MODE (x), XEXP (to, 0), XEXP (x, 1));
    }

  /* (and (ashiftrt (zero_extend FOO) N) CONST)
     may be simplified to (and (ashiftrt (subreg FOO) N) CONST)
     if CONST masks off the bits changed by extension.  */
  if ((GET_CODE (varop) == ASHIFTRT || GET_CODE (varop) == LSHIFTRT)
      && GET_CODE (XEXP (varop, 1)) == CONST_INT
      && XEXP (varop, 0) == to
      && (GET_CODE (to) == ZERO_EXTEND || GET_CODE (to) == SIGN_EXTEND)
      /* Verify the `and' discards all the extended bits.  */
      && (((unsigned) constop << INTVAL (XEXP (varop, 1)))
	  >> GET_MODE_BITSIZE (GET_MODE (XEXP (to, 0)))) == 0
      && FAKE_EXTEND_SAFE_P (GET_MODE (x), XEXP (to, 0)))
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      SUBST (XEXP (varop, 0),
	     gen_lowpart_for_combine (GET_MODE (x), XEXP (to, 0)));
      return x;
    }

  /* (and x const) may be converted to (zero_extend (subreg x 0)).  */
  if (constop == GET_MODE_MASK (QImode)
      && GET_CODE (varop) == REG)
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (ZERO_EXTEND, GET_MODE (x),
		      gen_rtx (SUBREG, QImode, varop, 0));
    }
  if (constop == GET_MODE_MASK (HImode)
      && GET_CODE (varop) == REG)
    {
      if (!undobuf.storage)
	undobuf.storage = (char *) oballoc (0);
      return gen_rtx (ZERO_EXTEND, GET_MODE (x),
		      gen_rtx (SUBREG, HImode, varop, 0));
    }

  /* No simplification applies.  */
  return 0;
}

/* Like gen_lowpart but for use by combine. In comb