📄 fold-const.c
      class = '1';
      *save_p = 1;
    }
#endif

  switch (class)
    {
    case '1':
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case '2':
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
              && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                      cval1, cval2, save_p));

    case 'c':
      return 1;

    case 'e':
      if (code == COND_EXPR)
        return (twoval_comparison_p (TREE_OPERAND (arg, 0),
                                     cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 1),
                                        cval1, cval2, save_p)
                && twoval_comparison_p (TREE_OPERAND (arg, 2),
                                        cval1, cval2, save_p));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
         the second operand, we know *CVAL1 can't be zero.  It must be that
         one side of the comparison is each of the values; test for the
         case where this isn't true by failing if the two operands
         are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1), 0))
        return 0;

      if (*cval1 == 0)
        *cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
        ;
      else
        return 0;

      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
        ;
      else if (*cval2 == 0)
        *cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
        ;
      else
        return 0;

      return 1;
    }

  return 0;
}

/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrence of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */

  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
           && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      return fold (build1 (code, type,
                           eval_subst (TREE_OPERAND (arg, 0),
                                       old0, new0, old1, new1)));

    case '2':
      return fold (build (code, type,
                          eval_subst (TREE_OPERAND (arg, 0),
                                      old0, new0, old1, new1),
                          eval_subst (TREE_OPERAND (arg, 1),
                                      old0, new0, old1, new1)));

    case 'e':
      switch (code)
        {
        case SAVE_EXPR:
          return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

        case COMPOUND_EXPR:
          return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

        case COND_EXPR:
          return fold (build (code, type,
                              eval_subst (TREE_OPERAND (arg, 0),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 1),
                                          old0, new0, old1, new1),
                              eval_subst (TREE_OPERAND (arg, 2),
                                          old0, new0, old1, new1)));
        }

    case '<':
      {
        tree arg0 = TREE_OPERAND (arg, 0);
        tree arg1 = TREE_OPERAND (arg, 1);

        /* We need to check both for exact equality and tree equality.  The
           former will be true if the operand has a side-effect.  In that
           case, we know the operand occurred exactly once.  */

        if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
          arg0 = new0;
        else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
          arg0 = new1;

        if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
          arg1 = new0;
        else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
          arg1 = new1;

        return fold (build (code, type, arg0, arg1));
      }
    }

  return arg;
}
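/* Illustrative sketch, not part of the original file: one plausible way a
   caller (such as fold's code for merging two comparisons) might combine
   twoval_comparison_p and eval_subst.  The names `cond', `cval1', `cval2'
   and `save_p' are made up for this example.  */
#if 0
  tree cval1 = 0, cval2 = 0;
  int save_p = 0;

  /* Check that COND is built from comparisons of just two values; on
     success CVAL1 and CVAL2 are those values.  */
  if (twoval_comparison_p (cond, &cval1, &cval2, &save_p)
      && cval1 != 0 && cval2 != 0)
    {
      /* What COND yields when the two values are equal: substitute the
         same constant for both operands of every comparison and fold.  */
      tree equal_result
        = eval_subst (cond, cval1, integer_one_node, cval2, integer_one_node);
    }
#endif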
/* Return a tree for the case when the result of an expression is RESULT
   converted to TYPE and OMITTED was previously an operand of the expression
   but is now not needed (e.g., we folded OMITTED * 0).

   If OMITTED has side effects, we must evaluate it.  Otherwise, just do
   the conversion of RESULT to TYPE.  */

static tree
omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return non_lvalue (t);
}

/* Similar, but call pedantic_non_lvalue instead of non_lvalue.  */

static tree
pedantic_omit_one_operand (type, result, omitted)
     tree type, result, omitted;
{
  tree t = convert (type, result);

  if (TREE_SIDE_EFFECTS (omitted))
    return build (COMPOUND_EXPR, type, omitted, t);

  return pedantic_non_lvalue (t);
}
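/* Illustrative sketch, not part of the original file: a hypothetical caller
   folding ARG0 * 0.  The product is known to be zero, but if ARG0 has side
   effects (say it contains a call) it must still be evaluated, so
   omit_one_operand produces the equivalent of "(arg0, 0)"; otherwise it
   simply returns zero converted to TYPE, wrapped as a non-lvalue.  */
#if 0
  if (integer_zerop (arg1))
    return omit_one_operand (type, integer_zero_node, arg0);
#endif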
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).  */

tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  if (code == ERROR_MARK)
    return arg;

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
          && code != NE_EXPR && code != EQ_EXPR)
        return build1 (TRUTH_NOT_EXPR, type, arg);
      else
        return build (invert_tree_comparison (code), type,
                      TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
    }

  switch (code)
    {
    case INTEGER_CST:
      return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
                                         && TREE_INT_CST_HIGH (arg) == 0, 0));

    case TRUTH_AND_EXPR:
      return build (TRUTH_OR_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      return build (TRUTH_AND_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
         unless the second operand is a TRUTH_NOT_EXPR in which case our
         result is the XOR of the first operand with the inside of the
         negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
        return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
                      TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
        return build (TRUTH_XOR_EXPR, type,
                      invert_truthvalue (TREE_OPERAND (arg, 0)),
                      TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      return build (TRUTH_ORIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
                    invert_truthvalue (TREE_OPERAND (arg, 0)),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)),
                    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
                    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      return build1 (TREE_CODE (arg), type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
        break;
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
                     invert_truthvalue (TREE_OPERAND (arg, 0)));
    }

  if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
    abort ();

  return build1 (TRUTH_NOT_EXPR, type, arg);
}

/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
   operands are another bit-wise operation with a common input.  If so,
   distribute the bit operations to save an operation and possibly two if
   constants are involved.  For example, convert
        (A | B) & (A | C) into A | (B & C)
   Further simplification will occur if B and C are constants.

   If this optimization cannot be done, 0 will be returned.  */

static tree
distribute_bit_expr (code, type, arg0, arg1)
     enum tree_code code;
     tree type;
     tree arg0, arg1;
{
  tree common;
  tree left, right;

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      || TREE_CODE (arg0) == code
      || (TREE_CODE (arg0) != BIT_AND_EXPR
          && TREE_CODE (arg0) != BIT_IOR_EXPR))
    return 0;

  if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 0);
      left = TREE_OPERAND (arg0, 1);
      right = TREE_OPERAND (arg1, 0);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 1);
    }
  else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
    {
      common = TREE_OPERAND (arg0, 1);
      left = TREE_OPERAND (arg0, 0);
      right = TREE_OPERAND (arg1, 0);
    }
  else
    return 0;

  return fold (build (TREE_CODE (arg0), type, common,
                      fold (build (code, type, left, right))));
}

/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
   starting at BITPOS.  The field is unsigned if UNSIGNEDP is non-zero.  */

static tree
make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
     tree inner;
     tree type;
     int bitsize, bitpos;
     int unsignedp;
{
  tree result = build (BIT_FIELD_REF, type, inner,
                       size_int (bitsize), size_int (bitpos));

  TREE_UNSIGNED (result) = unsignedp;

  return result;
}
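/* Illustrative sketch, not part of the original file: the source-level
   effect of distribute_bit_expr.  Because | distributes over & (and vice
   versa), (a | b) & (a | c) can be rewritten as a | (b & c), saving one
   bitwise operation; if b and c are constants, b & c folds away as well.
   The function and variable names below are made up for the example.  */
#if 0
int
combine_flags (int a, int b, int c)
{
  /* Written in the source as (a | b) & (a | c); folded form:  */
  return a | (b & c);
}
#endif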
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand
   being compared.  For two fields at the same position, we do the ANDs
   with the similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  int lbitpos, lbitsize, rbitpos, rbitsize;
  int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, lnmode, rnmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit
     size is the same as the size of the underlying object, we aren't doing
     an extraction at all and so can do nothing.  */

  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
                                &lunsignedp, &lvolatilep);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit
         positions, sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
                                    &runsignedp, &rvolatilep);

      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
          || lunsignedp != runsignedp || offset != 0)
        return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able
     to, but fail if we can't.  */
  lnmode = get_best_mode (lbitsize, lbitpos,
                          TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
                          lvolatilep);
  if (lnmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (lnmode, 0);
  unsigned_type = type_for_mode (lnmode, 1);

  if (! const_p)
    {
      rnmode = get_best_mode (rbitsize, rbitpos,
                              TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
                              rvolatilep);
      if (rnmode == VOIDmode)
        return 0;
    }

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = lbitpos & ~ (lnbitsize - 1);
  lbitpos -= lnbitpos;
  if (lnbitsize == lbitsize)
    return 0;

  if (! const_p)
    {
      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = rbitpos & ~ (rnbitsize - 1);
      r
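/* Illustrative sketch, not part of the original file: the word-level effect
   of the constant case described above, written out in plain C.  The field
   is assumed (for this example only) to be 3 bits wide at bit position 5 of
   its containing word; masking the word in place and comparing against the
   pre-shifted constant replaces the usual load/shift/mask extraction.  */
#if 0
static int
field_equals_six (unsigned int word)
{
  /* (word & mask) == (constant << bitpos), i.e. compare without shifting
     the field down to bit 0 first.  */
  return (word & (7u << 5)) == (6u << 5);
}
#endif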