X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-ssa-dom.c;h=a0d2f28290731c87c519fca33039f540bd006140;hb=b38319aba563f415e57aaf5b9ecaf00a442743e3;hp=dc546d45fafb030197ef2f8b5624568b6688bcec;hpb=99a32a28494ae2dab2c450ba44f3399066c68f39;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/tree-ssa-dom.c b/gcc/tree-ssa-dom.c
index dc546d45faf..a0d2f282907 100644
--- a/gcc/tree-ssa-dom.c
+++ b/gcc/tree-ssa-dom.c
@@ -16,8 +16,8 @@ GNU General Public License for more details.
 
 You should have received a copy of the GNU General Public License
 along with GCC; see the file COPYING.  If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA.  */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA.  */
 
 #include "config.h"
 #include "system.h"
@@ -31,7 +31,6 @@ Boston, MA 02111-1307, USA.  */
 #include "basic-block.h"
 #include "cfgloop.h"
 #include "output.h"
-#include "errors.h"
 #include "expr.h"
 #include "function.h"
 #include "diagnostic.h"
@@ -43,6 +42,7 @@ Boston, MA 02111-1307, USA.  */
 #include "tree-pass.h"
 #include "tree-ssa-propagate.h"
 #include "langhooks.h"
+#include "params.h"
 
 /* This file implements optimizations on the dominator tree.  */
 
@@ -167,6 +167,7 @@ struct opt_stats_d
   long num_re;
   long num_const_prop;
   long num_copy_prop;
+  long num_iterations;
 };
 
 static struct opt_stats_d opt_stats;
@@ -273,8 +274,7 @@ static void record_cond (tree, tree);
 static void record_const_or_copy (tree, tree);
 static void record_equality (tree, tree);
 static tree update_rhs_and_lookup_avail_expr (tree, tree, bool);
-static tree simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *,
-                                                tree, int);
+static tree simplify_rhs_and_lookup_avail_expr (tree, int);
 static tree simplify_cond_and_lookup_avail_expr (tree, stmt_ann_t, int);
 static tree simplify_switch_and_lookup_avail_expr (tree, int);
 static tree find_equivalent_equality_comparison (tree);
@@ -282,8 +282,7 @@ static void record_range (tree, basic_block);
 static bool extract_range_from_cond (tree, tree *, tree *, int *);
 static void record_equivalences_from_phis (basic_block);
 static void record_equivalences_from_incoming_edge (basic_block);
-static bool eliminate_redundant_computations (struct dom_walk_data *,
-                                              tree, stmt_ann_t);
+static bool eliminate_redundant_computations (tree, stmt_ann_t);
 static void record_equivalences_from_stmt (tree, int, stmt_ann_t);
 static void thread_across_edge (struct dom_walk_data *, edge);
 static void dom_opt_finalize_block (struct dom_walk_data *, basic_block);
@@ -481,7 +480,11 @@ tree_ssa_dominator_optimize (void)
       if (cfg_altered)
         free_dominance_info (CDI_DOMINATORS);
 
-      cfg_altered = cleanup_tree_cfg ();
+      /* Only iterate if we threaded jumps AND the CFG cleanup did
+         something interesting.  Other cases generate far fewer
+         optimization opportunities and thus are not worth another
+         full DOM iteration.  */
+      cfg_altered &= cleanup_tree_cfg ();
 
       if (rediscover_loops_after_threading)
         {
@@ -527,6 +530,8 @@ tree_ssa_dominator_optimize (void)
           if (value && !is_gimple_min_invariant (value))
             SSA_NAME_VALUE (name) = NULL;
         }
+
+      opt_stats.num_iterations++;
     }
   while (optimize > 1 && cfg_altered);
 
@@ -604,6 +609,9 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
   block_stmt_iterator bsi;
   tree stmt = NULL;
   tree phi;
+  int stmt_count = 0;
+  int max_stmt_count;
+
   /* If E->dest does not end with a conditional, then there is
      nothing to do.  */
@@ -633,6 +641,11 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
       tree src = PHI_ARG_DEF_FROM_EDGE (phi, e);
       tree dst = PHI_RESULT (phi);
 
+      /* Do not include virtual PHIs in our statement count as
+         they never generate code.  */
+      if (is_gimple_reg (dst))
+        stmt_count++;
+
       /* If the desired argument is not the same as this PHI's result
          and it is set by a PHI in E->dest, then we can not thread
          through E->dest.  */
@@ -660,9 +673,10 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
      Failure to simplify into the form above merely means that the
      statement provides no equivalences to help simplify later
      statements.  This does not prevent threading through E->dest.  */
+  max_stmt_count = PARAM_VALUE (PARAM_MAX_JUMP_THREAD_DUPLICATION_STMTS);
   for (bsi = bsi_start (e->dest); ! bsi_end_p (bsi); bsi_next (&bsi))
     {
-      tree cached_lhs;
+      tree cached_lhs = NULL;
 
       stmt = bsi_stmt (bsi);
@@ -670,6 +684,12 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
       if (IS_EMPTY_STMT (stmt) || TREE_CODE (stmt) == LABEL_EXPR)
         continue;
 
+      /* If duplicating this block is going to cause too much code
+         expansion, then do not thread through this block.  */
+      stmt_count++;
+      if (stmt_count > max_stmt_count)
+        return;
+
       /* Safely handle threading across loop backedges.  This is
          over conservative, but still allows us to capture the
         majority of the cases where we can thread across a loop
@@ -701,7 +721,7 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
       else
         {
           /* Copy the operands.  */
-          tree *copy;
+          tree *copy, pre_fold_expr;
           ssa_op_iter iter;
           use_operand_p use_p;
           unsigned int num, i = 0;
@@ -725,12 +745,31 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
 
           /* Try to fold/lookup the new expression.  Inserting the
              expression into the hash table is unlikely to help
-             simplify anything later, so just query the hashtable.  */
-          cached_lhs = fold (TREE_OPERAND (stmt, 1));
-          if (TREE_CODE (cached_lhs) != SSA_NAME
-              && !is_gimple_min_invariant (cached_lhs))
-            cached_lhs = lookup_avail_expr (stmt, false);
+             Sadly, we have to handle conditional assignments specially
+             here, because fold expects all the operands of an expression
+             to be folded before the expression itself is folded, but we
+             can't just substitute the folded condition here.  */
+          if (TREE_CODE (TREE_OPERAND (stmt, 1)) == COND_EXPR)
+            {
+              tree cond = COND_EXPR_COND (TREE_OPERAND (stmt, 1));
+              cond = fold (cond);
+              if (cond == boolean_true_node)
+                pre_fold_expr = COND_EXPR_THEN (TREE_OPERAND (stmt, 1));
+              else if (cond == boolean_false_node)
+                pre_fold_expr = COND_EXPR_ELSE (TREE_OPERAND (stmt, 1));
+              else
+                pre_fold_expr = TREE_OPERAND (stmt, 1);
+            }
+          else
+            pre_fold_expr = TREE_OPERAND (stmt, 1);
+          if (pre_fold_expr)
+            {
+              cached_lhs = fold (pre_fold_expr);
+              if (TREE_CODE (cached_lhs) != SSA_NAME
+                  && !is_gimple_min_invariant (cached_lhs))
+                cached_lhs = lookup_avail_expr (stmt, false);
+            }
 
           /* Restore the statement's original uses/defs.  */
           i = 0;
@@ -848,8 +887,6 @@ thread_across_edge (struct dom_walk_data *walk_data, edge e)
     {
       struct edge_info *edge_info;
 
-      update_bb_profile_for_threading (e->dest, EDGE_FREQUENCY (e),
-                                       e->count, taken_edge);
       if (e->aux)
         edge_info = e->aux;
       else
@@ -887,7 +924,7 @@ dom_opt_initialize_block (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
 }
 
 /* Given an expression EXPR (a relational expression or a statement),
-   initialize the hash table element pointed by by ELEMENT.  */
+   initialize the hash table element pointed to by ELEMENT.  */
 
 static void
 initialize_hash_element (tree expr, tree lhs, struct expr_hash_elt *element)
@@ -999,14 +1036,14 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
 {
   tree last;
 
-  /* If we are at a leaf node in the dominator tree, see if we can thread
-     the edge from BB through its successor.
-
-     Do this before we remove entries from our equivalence tables.  */
+  /* If we have an outgoing edge to a block with multiple incoming and
+     outgoing edges, then we may be able to thread the edge.  ie, we
+     may be able to statically determine which of the outgoing edges
+     will be traversed when the incoming edge from BB is traversed.  */
   if (single_succ_p (bb)
       && (single_succ_edge (bb)->flags & EDGE_ABNORMAL) == 0
-      && (get_immediate_dominator (CDI_DOMINATORS, single_succ (bb)) != bb
-          || phi_nodes (single_succ (bb))))
+      && !single_pred_p (single_succ (bb))
+      && !single_succ_p (single_succ (bb)))
     {
       thread_across_edge (walk_data, single_succ_edge (bb));
 
@@ -1023,10 +1060,9 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
 
       extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
 
-      /* If the THEN arm is the end of a dominator tree or has PHI nodes,
-         then try to thread through its edge.  */
-      if (get_immediate_dominator (CDI_DOMINATORS, true_edge->dest) != bb
-          || phi_nodes (true_edge->dest))
+      /* Only try to thread the edge if it reaches a target block with
+         more than one predecessor and more than one successor.  */
+      if (!single_pred_p (true_edge->dest) && !single_succ_p (true_edge->dest))
         {
           struct edge_info *edge_info;
           unsigned int i;
@@ -1073,8 +1109,7 @@ dom_opt_finalize_block (struct dom_walk_data *walk_data, basic_block bb)
         }
 
       /* Similarly for the ELSE arm.  */
-      if (get_immediate_dominator (CDI_DOMINATORS, false_edge->dest) != bb
-          || phi_nodes (false_edge->dest))
+      if (!single_pred_p (false_edge->dest) && !single_succ_p (false_edge->dest))
         {
           struct edge_info *edge_info;
           unsigned int i;
@@ -1359,6 +1394,9 @@ dump_dominator_optimization_stats (FILE *file)
   fprintf (file, " Copies propagated: %6ld\n",
            opt_stats.num_copy_prop);
 
+  fprintf (file, "\nTotal number of DOM iterations: %6ld\n",
+           opt_stats.num_iterations);
+
   fprintf (file, "\nHash table statistics:\n");
 
   fprintf (file, " avail_exprs: ");
@@ -1716,8 +1754,7 @@ simple_iv_increment_p (tree stmt)
    the hash table and return the result.  Otherwise return NULL.  */
 
 static tree
-simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
-                                    tree stmt, int insert)
+simplify_rhs_and_lookup_avail_expr (tree stmt, int insert)
 {
   tree rhs = TREE_OPERAND (stmt, 1);
   enum tree_code rhs_code = TREE_CODE (rhs);
@@ -1757,6 +1794,7 @@ simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
          assignment.  Add minus to this, as we handle it specially below.  */
       if ((associative_tree_code (rhs_code) || rhs_code == MINUS_EXPR)
           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
+          && num_imm_uses (TREE_OPERAND (rhs, 0)) == 1
           && is_gimple_min_invariant (TREE_OPERAND (rhs, 1)))
         {
           tree rhs_def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
@@ -1841,127 +1879,6 @@ simplify_rhs_and_lookup_avail_expr (struct dom_walk_data *walk_data,
       dont_fold_assoc:;
     }
 
-  /* Transform TRUNC_DIV_EXPR and TRUNC_MOD_EXPR into RSHIFT_EXPR
-     and BIT_AND_EXPR respectively if the first operand is greater
-     than zero and the second operand is an exact power of two.  */
-  if ((rhs_code == TRUNC_DIV_EXPR || rhs_code == TRUNC_MOD_EXPR)
-      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0)))
-      && integer_pow2p (TREE_OPERAND (rhs, 1)))
-    {
-      tree val;
-      tree op = TREE_OPERAND (rhs, 0);
-
-      if (TYPE_UNSIGNED (TREE_TYPE (op)))
-        {
-          val = integer_one_node;
-        }
-      else
-        {
-          tree dummy_cond = walk_data->global_data;
-
-          if (! dummy_cond)
-            {
-              dummy_cond = build (GT_EXPR, boolean_type_node,
-                                  op, integer_zero_node);
-              dummy_cond = build (COND_EXPR, void_type_node,
-                                  dummy_cond, NULL, NULL);
-              walk_data->global_data = dummy_cond;
-            }
-          else
-            {
-              TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GT_EXPR);
-              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
-              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
-                = integer_zero_node;
-            }
-          val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
-        }
-
-      if (val && integer_onep (val))
-        {
-          tree t;
-          tree op0 = TREE_OPERAND (rhs, 0);
-          tree op1 = TREE_OPERAND (rhs, 1);
-
-          if (rhs_code == TRUNC_DIV_EXPR)
-            t = build (RSHIFT_EXPR, TREE_TYPE (op0), op0,
-                       build_int_cst (NULL_TREE, tree_log2 (op1)));
-          else
-            t = build (BIT_AND_EXPR, TREE_TYPE (op0), op0,
-                       local_fold (build (MINUS_EXPR, TREE_TYPE (op1),
-                                          op1, integer_one_node)));
-
-          result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
-        }
-    }
-
-  /* Transform ABS (X) into X or -X as appropriate.  */
-  if (rhs_code == ABS_EXPR
-      && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (rhs, 0))))
-    {
-      tree val;
-      tree op = TREE_OPERAND (rhs, 0);
-      tree type = TREE_TYPE (op);
-
-      if (TYPE_UNSIGNED (type))
-        {
-          val = integer_zero_node;
-        }
-      else
-        {
-          tree dummy_cond = walk_data->global_data;
-
-          if (! dummy_cond)
-            {
-              dummy_cond = build (LE_EXPR, boolean_type_node,
-                                  op, integer_zero_node);
-              dummy_cond = build (COND_EXPR, void_type_node,
-                                  dummy_cond, NULL, NULL);
-              walk_data->global_data = dummy_cond;
-            }
-          else
-            {
-              TREE_SET_CODE (COND_EXPR_COND (dummy_cond), LE_EXPR);
-              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
-              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
-                = build_int_cst (type, 0);
-            }
-          val = simplify_cond_and_lookup_avail_expr (dummy_cond, NULL, false);
-
-          if (!val)
-            {
-              TREE_SET_CODE (COND_EXPR_COND (dummy_cond), GE_EXPR);
-              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 0) = op;
-              TREE_OPERAND (COND_EXPR_COND (dummy_cond), 1)
-                = build_int_cst (type, 0);
-
-              val = simplify_cond_and_lookup_avail_expr (dummy_cond,
-                                                         NULL, false);
-
-              if (val)
-                {
-                  if (integer_zerop (val))
-                    val = integer_one_node;
-                  else if (integer_onep (val))
-                    val = integer_zero_node;
-                }
-            }
-        }
-
-      if (val
-          && (integer_onep (val) || integer_zerop (val)))
-        {
-          tree t;
-
-          if (integer_onep (val))
-            t = build1 (NEGATE_EXPR, TREE_TYPE (op), op);
-          else
-            t = op;
-
-          result = update_rhs_and_lookup_avail_expr (stmt, t, insert);
-        }
-    }
-
   /* Optimize *"foo" into 'f'.  This is done here rather than
      in fold to avoid problems with stuff like &*"foo".  */
   if (TREE_CODE (rhs) == INDIRECT_REF || TREE_CODE (rhs) == ARRAY_REF)
@@ -2007,6 +1924,18 @@ find_equivalent_equality_comparison (tree cond)
     {
       tree def_rhs = TREE_OPERAND (def_stmt, 1);
+
+      /* If either operand to the comparison is a pointer to
+         a function, then we can not apply this optimization
+         as some targets require function pointers to be
+         canonicalized and in this case this optimization would
+         eliminate a necessary canonicalization.  */
+      if ((POINTER_TYPE_P (TREE_TYPE (op0))
+           && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) == FUNCTION_TYPE)
+          || (POINTER_TYPE_P (TREE_TYPE (op1))
+              && TREE_CODE (TREE_TYPE (TREE_TYPE (op1))) == FUNCTION_TYPE))
+        return NULL;
+
       /* Now make sure the RHS of the MODIFY_EXPR is a typecast.  */
       if ((TREE_CODE (def_rhs) == NOP_EXPR
            || TREE_CODE (def_rhs) == CONVERT_EXPR)
@@ -2020,6 +1949,16 @@ find_equivalent_equality_comparison (tree cond)
               > TYPE_PRECISION (TREE_TYPE (def_rhs)))
             return NULL;
 
+          /* If the inner type of the conversion is a pointer to
+             a function, then we can not apply this optimization
+             as some targets require function pointers to be
+             canonicalized.  This optimization would result in
+             canonicalization of the pointer when it was not originally
+             needed/intended.  */
+          if (POINTER_TYPE_P (def_rhs_inner_type)
+              && TREE_CODE (TREE_TYPE (def_rhs_inner_type)) == FUNCTION_TYPE)
+            return NULL;
+
          /* What we want to prove is that if we convert OP1 to
             the type of the object inside the NOP_EXPR that the
             result is still equivalent to SRC.
@@ -2423,7 +2362,7 @@ record_edge_info (basic_block bb)
         {
           tree labels = SWITCH_LABELS (stmt);
           int i, n_labels = TREE_VEC_LENGTH (labels);
-          tree *info = xcalloc (n_basic_blocks, sizeof (tree));
+          tree *info = xcalloc (last_basic_block, sizeof (tree));
           edge e;
           edge_iterator ei;
@@ -2603,13 +2542,13 @@ propagate_to_outgoing_edges (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
    table.  */
 
 static bool
-eliminate_redundant_computations (struct dom_walk_data *walk_data,
-                                  tree stmt, stmt_ann_t ann)
+eliminate_redundant_computations (tree stmt, stmt_ann_t ann)
 {
   tree *expr_p, def = NULL_TREE;
   bool insert = true;
   tree cached_lhs;
   bool retval = false;
+  bool modify_expr_p = false;
 
   if (TREE_CODE (stmt) == MODIFY_EXPR)
     def = TREE_OPERAND (stmt, 0);
@@ -2633,7 +2572,7 @@ eliminate_redundant_computations (struct dom_walk_data *walk_data,
      then try to simplify the RHS and lookup the new RHS in the
      hash table.  */
   if (! cached_lhs && TREE_CODE (stmt) == MODIFY_EXPR)
-    cached_lhs = simplify_rhs_and_lookup_avail_expr (walk_data, stmt, insert);
+    cached_lhs = simplify_rhs_and_lookup_avail_expr (stmt, insert);
 
   /* Similarly if this is a COND_EXPR and we did not find its expression
      in the hash table, simplify the condition and try again.  */
@@ -2651,9 +2590,15 @@ eliminate_redundant_computations (struct dom_walk_data *walk_data,
   else if (TREE_CODE (stmt) == SWITCH_EXPR)
     expr_p = &SWITCH_COND (stmt);
   else if (TREE_CODE (stmt) == RETURN_EXPR && TREE_OPERAND (stmt, 0))
-    expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
+    {
+      expr_p = &TREE_OPERAND (TREE_OPERAND (stmt, 0), 1);
+      modify_expr_p = true;
+    }
   else
-    expr_p = &TREE_OPERAND (stmt, 1);
+    {
+      expr_p = &TREE_OPERAND (stmt, 1);
+      modify_expr_p = true;
+    }
 
   /* It is safe to ignore types here since we have already done
      type checking in the hashing and equality routines.  In fact
     type checking here merely gets in the way of constant
     propagation.  Also, make sure that it is safe to propagate
     CACHED_LHS into *EXPR_P.  */
   if (cached_lhs
-      && (TREE_CODE (cached_lhs) != SSA_NAME
+      && ((TREE_CODE (cached_lhs) != SSA_NAME
+           && (modify_expr_p
+               || tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
+                                                      TREE_TYPE (cached_lhs))))
           || may_propagate_copy (*expr_p, cached_lhs)))
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
@@ -2684,6 +2632,11 @@ eliminate_redundant_computations (struct dom_walk_data *walk_data,
           || (POINTER_TYPE_P (TREE_TYPE (*expr_p))
               && is_gimple_min_invariant (cached_lhs)))
         retval = true;
+
+      if (modify_expr_p
+          && !tree_ssa_useless_type_conversion_1 (TREE_TYPE (*expr_p),
+                                                  TREE_TYPE (cached_lhs)))
+        cached_lhs = fold_convert (TREE_TYPE (*expr_p), cached_lhs);
 
       propagate_tree_value (expr_p, cached_lhs);
 
       mark_stmt_modified (stmt);
@@ -2722,7 +2675,7 @@ record_equivalences_from_stmt (tree stmt,
               || is_gimple_min_invariant (rhs)))
         SSA_NAME_VALUE (lhs) = rhs;
 
-      if (expr_computes_nonzero (rhs))
+      if (tree_expr_nonzero_p (rhs))
         record_var_is_nonzero (lhs);
     }
@@ -2940,7 +2893,7 @@ cprop_into_stmt (tree stmt)
 }
 
 
-/* Optimize the statement pointed by iterator SI.
+/* Optimize the statement pointed to by iterator SI.
 
    We try to perform some simplistic global redundancy elimination and
   constant propagation:
@@ -2956,8 +2909,8 @@ cprop_into_stmt (tree stmt)
      the variable in the LHS in the CONST_AND_COPIES table.  */
 
 static void
-optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
-               block_stmt_iterator si)
+optimize_stmt (struct dom_walk_data *walk_data ATTRIBUTE_UNUSED,
+               basic_block bb, block_stmt_iterator si)
 {
   stmt_ann_t ann;
   tree stmt, old_stmt;
@@ -3027,7 +2980,7 @@ optimize_stmt (struct dom_walk_data *walk_data, basic_block bb,
   if (may_optimize_p)
     may_have_exposed_new_symbols
-      |= eliminate_redundant_computations (walk_data, stmt, ann);
+      |= eliminate_redundant_computations (stmt, ann);
 
   /* Record any additional equivalences created by this statement.  */
   if (TREE_CODE (stmt) == MODIFY_EXPR)
@@ -3150,7 +3103,7 @@ update_rhs_and_lookup_avail_expr (tree stmt, tree new_rhs, bool insert)
    NULL_TREE.
 
   Also, when an expression is first inserted in the AVAIL_EXPRS table, it
-   is also added to the stack pointed by BLOCK_AVAIL_EXPRS_P, so that they
+   is also added to the stack pointed to by BLOCK_AVAIL_EXPRS_P, so that they
   can be removed when we finish processing this block and its children.
 
   NOTE: This function assumes that STMT is a MODIFY_EXPR node that
@@ -3192,11 +3145,8 @@ lookup_avail_expr (tree stmt, bool insert)
         {
           tree t = element->rhs;
           free (element);
-
-          if (TREE_CODE (t) == EQ_EXPR)
-            return boolean_false_node;
-          else
-            return boolean_true_node;
+          return constant_boolean_node (TREE_CODE (t) != EQ_EXPR,
+                                        TREE_TYPE (t));
         }
     }
@@ -3253,10 +3203,7 @@ extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
      record ranges for enumerations.  Presumably this is due to the fact
     that they're rarely used directly.  They are typically cast into an
     integer type and used that way.  */
-  if (TREE_CODE (type) != INTEGER_TYPE
-      /* We don't know how to deal with types with variable bounds.  */
-      || TREE_CODE (TYPE_MIN_VALUE (type)) != INTEGER_CST
-      || TREE_CODE (TYPE_MAX_VALUE (type)) != INTEGER_CST)
+  if (TREE_CODE (type) != INTEGER_TYPE)
     return 0;
 
   switch (TREE_CODE (cond))
@@ -3273,12 +3220,19 @@ extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
 
     case GE_EXPR:
       low = op1;
+
+      /* Get the highest value of the type.  If not a constant, use that
+         of its base type, if it has one.  */
      high = TYPE_MAX_VALUE (type);
+      if (TREE_CODE (high) != INTEGER_CST && TREE_TYPE (type))
+        high = TYPE_MAX_VALUE (TREE_TYPE (type));
      inverted = 0;
      break;

    case GT_EXPR:
      high = TYPE_MAX_VALUE (type);
+      if (TREE_CODE (high) != INTEGER_CST && TREE_TYPE (type))
+        high = TYPE_MAX_VALUE (TREE_TYPE (type));
      if (!tree_int_cst_lt (op1, high))
        return 0;
      low = int_const_binop (PLUS_EXPR, op1, integer_one_node, 1);
@@ -3288,11 +3242,15 @@ extract_range_from_cond (tree cond, tree *hi_p, tree *lo_p, int *inverted_p)
 
    case LE_EXPR:
      high = op1;
      low = TYPE_MIN_VALUE (type);
+      if (TREE_CODE (low) != INTEGER_CST && TREE_TYPE (type))
+        low = TYPE_MIN_VALUE (TREE_TYPE (type));
      inverted = 0;
      break;

    case LT_EXPR:
      low = TYPE_MIN_VALUE (type);
+      if (TREE_CODE (low) != INTEGER_CST && TREE_TYPE (type))
+        low = TYPE_MIN_VALUE (TREE_TYPE (type));
      if (!tree_int_cst_lt (low, op1))
        return 0;
      high = int_const_binop (MINUS_EXPR, op1, integer_one_node, 1);