X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fgimple-fold.c;h=0210ccfedb9b2b305194f1593699dcda62559b65;hb=7a7159f36c89351666b1e03043e2bdf1341acc29;hp=4fb1b3f0ba7a85534209e63022309c9d19ec9c1c;hpb=e4227d049cda6d627df1765ee46f66a25bbcdd3a;p=pf3gnuchains%2Fgcc-fork.git diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c index 4fb1b3f0ba7..0210ccfedb9 100644 --- a/gcc/gimple-fold.c +++ b/gcc/gimple-fold.c @@ -1,5 +1,5 @@ /* Statement simplification on GIMPLE. - Copyright (C) 2010 Free Software Foundation, Inc. + Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc. Split out from tree-ssa-ccp.c. This file is part of GCC. @@ -24,707 +24,167 @@ along with GCC; see the file COPYING3. If not see #include "tm.h" #include "tree.h" #include "flags.h" -#include "rtl.h" -#include "tm_p.h" -#include "ggc.h" -#include "basic-block.h" -#include "output.h" -#include "expr.h" #include "function.h" -#include "diagnostic.h" -#include "timevar.h" #include "tree-dump.h" #include "tree-flow.h" #include "tree-pass.h" #include "tree-ssa-propagate.h" -#include "value-prof.h" -#include "langhooks.h" #include "target.h" +#include "gimple-fold.h" + +/* Return true when DECL can be referenced from current unit. + We can get declarations that are not possible to reference for + various reasons: + + 1) When analyzing C++ virtual tables. + C++ virtual tables do have known constructors even + when they are keyed to other compilation unit. + Those tables can contain pointers to methods and vars + in other units. Those methods have both STATIC and EXTERNAL + set. + 2) In WHOPR mode devirtualization might lead to reference + to method that was partitioned elsehwere. + In this case we have static VAR_DECL or FUNCTION_DECL + that has no corresponding callgraph/varpool node + declaring the body. + 3) COMDAT functions referred by external vtables that + we devirtualize only during final copmilation stage. + At this time we already decided that we will not output + the function body and thus we can't reference the symbol + directly. */ - -/* If SYM is a constant variable with known value, return the value. - NULL_TREE is returned otherwise. */ - -tree -get_symbol_constant_value (tree sym) -{ - if (TREE_STATIC (sym) - && (TREE_READONLY (sym) - || TREE_CODE (sym) == CONST_DECL)) - { - tree val = DECL_INITIAL (sym); - if (val) - { - STRIP_NOPS (val); - if (is_gimple_min_invariant (val)) - { - if (TREE_CODE (val) == ADDR_EXPR) - { - tree base = get_base_address (TREE_OPERAND (val, 0)); - if (base && TREE_CODE (base) == VAR_DECL) - { - TREE_ADDRESSABLE (base) = 1; - if (gimple_referenced_vars (cfun)) - add_referenced_var (base); - } - } - return val; - } - } - /* Variables declared 'const' without an initializer - have zero as the initializer if they may not be - overridden at link or run time. */ - if (!val - && !DECL_EXTERNAL (sym) - && targetm.binds_local_p (sym) - && (INTEGRAL_TYPE_P (TREE_TYPE (sym)) - || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym)))) - return fold_convert (TREE_TYPE (sym), integer_zero_node); - } - - return NULL_TREE; -} - - -/* Return true if we may propagate the address expression ADDR into the - dereference DEREF and cancel them. */ - -bool -may_propagate_address_into_dereference (tree addr, tree deref) -{ - gcc_assert (INDIRECT_REF_P (deref) - && TREE_CODE (addr) == ADDR_EXPR); - - /* Don't propagate if ADDR's operand has incomplete type. 
*/ - if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0)))) - return false; - - /* If the address is invariant then we do not need to preserve restrict - qualifications. But we do need to preserve volatile qualifiers until - we can annotate the folded dereference itself properly. */ - if (is_gimple_min_invariant (addr) - && (!TREE_THIS_VOLATILE (deref) - || TYPE_VOLATILE (TREE_TYPE (addr)))) - return useless_type_conversion_p (TREE_TYPE (deref), - TREE_TYPE (TREE_OPERAND (addr, 0))); - - /* Else both the address substitution and the folding must result in - a valid useless type conversion sequence. */ - return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)), - TREE_TYPE (addr)) - && useless_type_conversion_p (TREE_TYPE (deref), - TREE_TYPE (TREE_OPERAND (addr, 0)))); -} - - -/* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X]. - BASE is an array type. OFFSET is a byte displacement. ORIG_TYPE - is the desired result type. - - LOC is the location of the original expression. */ - -static tree -maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset, - tree orig_type, - bool allow_negative_idx) +static bool +can_refer_decl_in_current_unit_p (tree decl) { - tree min_idx, idx, idx_type, elt_offset = integer_zero_node; - tree array_type, elt_type, elt_size; - tree domain_type; - - /* If BASE is an ARRAY_REF, we can pick up another offset (this time - measured in units of the size of elements type) from that ARRAY_REF). - We can't do anything if either is variable. - - The case we handle here is *(&A[N]+O). */ - if (TREE_CODE (base) == ARRAY_REF) - { - tree low_bound = array_ref_low_bound (base); - - elt_offset = TREE_OPERAND (base, 1); - if (TREE_CODE (low_bound) != INTEGER_CST - || TREE_CODE (elt_offset) != INTEGER_CST) - return NULL_TREE; - - elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0); - base = TREE_OPERAND (base, 0); - } - - /* Ignore stupid user tricks of indexing non-array variables. */ - array_type = TREE_TYPE (base); - if (TREE_CODE (array_type) != ARRAY_TYPE) - return NULL_TREE; - elt_type = TREE_TYPE (array_type); - if (!useless_type_conversion_p (orig_type, elt_type)) - return NULL_TREE; - - /* Use signed size type for intermediate computation on the index. */ - idx_type = ssizetype; - - /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the - element type (so we can use the alignment if it's not constant). - Otherwise, compute the offset as an index by using a division. If the - division isn't exact, then don't do anything. */ - elt_size = TYPE_SIZE_UNIT (elt_type); - if (!elt_size) - return NULL; - if (integer_zerop (offset)) - { - if (TREE_CODE (elt_size) != INTEGER_CST) - elt_size = size_int (TYPE_ALIGN (elt_type)); - - idx = build_int_cst (idx_type, 0); - } - else - { - unsigned HOST_WIDE_INT lquo, lrem; - HOST_WIDE_INT hquo, hrem; - double_int soffset; - - /* The final array offset should be signed, so we need - to sign-extend the (possibly pointer) offset here - and use signed division. */ - soffset = double_int_sext (tree_to_double_int (offset), - TYPE_PRECISION (TREE_TYPE (offset))); - if (TREE_CODE (elt_size) != INTEGER_CST - || div_and_round_double (TRUNC_DIV_EXPR, 0, - soffset.low, soffset.high, - TREE_INT_CST_LOW (elt_size), - TREE_INT_CST_HIGH (elt_size), - &lquo, &hquo, &lrem, &hrem) - || lrem || hrem) - return NULL_TREE; - - idx = build_int_cst_wide (idx_type, lquo, hquo); - } - - /* Assume the low bound is zero. 
If there is a domain type, get the - low bound, if any, convert the index into that type, and add the - low bound. */ - min_idx = build_int_cst (idx_type, 0); - domain_type = TYPE_DOMAIN (array_type); - if (domain_type) - { - idx_type = domain_type; - if (TYPE_MIN_VALUE (idx_type)) - min_idx = TYPE_MIN_VALUE (idx_type); - else - min_idx = fold_convert (idx_type, min_idx); - - if (TREE_CODE (min_idx) != INTEGER_CST) - return NULL_TREE; - - elt_offset = fold_convert (idx_type, elt_offset); - } - - if (!integer_zerop (min_idx)) - idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0); - if (!integer_zerop (elt_offset)) - idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0); - - /* Make sure to possibly truncate late after offsetting. */ - idx = fold_convert (idx_type, idx); - - /* We don't want to construct access past array bounds. For example - char *(c[4]); - c[3][2]; - should not be simplified into (*c)[14] or tree-vrp will - give false warnings. The same is true for - struct A { long x; char d[0]; } *a; - (char *)a - 4; - which should be not folded to &a->d[-8]. */ - if (domain_type - && TYPE_MAX_VALUE (domain_type) - && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST) - { - tree up_bound = TYPE_MAX_VALUE (domain_type); + struct varpool_node *vnode; + struct cgraph_node *node; - if (tree_int_cst_lt (up_bound, idx) - /* Accesses after the end of arrays of size 0 (gcc - extension) and 1 are likely intentional ("struct - hack"). */ - && compare_tree_int (up_bound, 1) > 0) - return NULL_TREE; - } - if (domain_type - && TYPE_MIN_VALUE (domain_type)) + if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl)) + return true; + /* External flag is set, so we deal with C++ reference + to static object from other file. */ + if (DECL_EXTERNAL (decl) && TREE_STATIC (decl) + && TREE_CODE (decl) == VAR_DECL) { - if (!allow_negative_idx - && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST - && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type))) - return NULL_TREE; + /* Just be sure it is not big in frontend setting + flags incorrectly. Those variables should never + be finalized. */ + gcc_checking_assert (!(vnode = varpool_get_node (decl)) + || !vnode->finalized); + return false; } - else if (!allow_negative_idx - && compare_tree_int (idx, 0) < 0) - return NULL_TREE; - - { - tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE); - SET_EXPR_LOCATION (t, loc); - return t; - } -} - - -/* Attempt to fold *(S+O) to S.X. - BASE is a record type. OFFSET is a byte displacement. ORIG_TYPE - is the desired result type. - - LOC is the location of the original expression. */ - -static tree -maybe_fold_offset_to_component_ref (location_t loc, tree record_type, - tree base, tree offset, tree orig_type) -{ - tree f, t, field_type, tail_array_field, field_offset; - tree ret; - tree new_base; - - if (TREE_CODE (record_type) != RECORD_TYPE - && TREE_CODE (record_type) != UNION_TYPE - && TREE_CODE (record_type) != QUAL_UNION_TYPE) - return NULL_TREE; - - /* Short-circuit silly cases. */ - if (useless_type_conversion_p (record_type, orig_type)) - return NULL_TREE; - - tail_array_field = NULL_TREE; - for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f)) + /* When function is public, we always can introduce new reference. + Exception are the COMDAT functions where introducing a direct + reference imply need to include function body in the curren tunit. */ + if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl)) + return true; + /* We are not at ltrans stage; so don't worry about WHOPR. 
+ Also when still gimplifying all referred comdat functions will be + produced. + ??? as observed in PR20991 for already optimized out comdat virtual functions + we may not neccesarily give up because the copy will be output elsewhere when + corresponding vtable is output. */ + if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready)) + return true; + /* If we already output the function body, we are safe. */ + if (TREE_ASM_WRITTEN (decl)) + return true; + if (TREE_CODE (decl) == FUNCTION_DECL) { - int cmp; - - if (TREE_CODE (f) != FIELD_DECL) - continue; - if (DECL_BIT_FIELD (f)) - continue; - - if (!DECL_FIELD_OFFSET (f)) - continue; - field_offset = byte_position (f); - if (TREE_CODE (field_offset) != INTEGER_CST) - continue; - - /* ??? Java creates "interesting" fields for representing base classes. - They have no name, and have no context. With no context, we get into - trouble with nonoverlapping_component_refs_p. Skip them. */ - if (!DECL_FIELD_CONTEXT (f)) - continue; - - /* The previous array field isn't at the end. */ - tail_array_field = NULL_TREE; - - /* Check to see if this offset overlaps with the field. */ - cmp = tree_int_cst_compare (field_offset, offset); - if (cmp > 0) - continue; - - field_type = TREE_TYPE (f); - - /* Here we exactly match the offset being checked. If the types match, - then we can return that field. */ - if (cmp == 0 - && useless_type_conversion_p (orig_type, field_type)) - { - t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); - return t; - } - - /* Don't care about offsets into the middle of scalars. */ - if (!AGGREGATE_TYPE_P (field_type)) - continue; - - /* Check for array at the end of the struct. This is often - used as for flexible array members. We should be able to - turn this into an array access anyway. */ - if (TREE_CODE (field_type) == ARRAY_TYPE) - tail_array_field = f; - - /* Check the end of the field against the offset. */ - if (!DECL_SIZE_UNIT (f) - || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST) - continue; - t = int_const_binop (MINUS_EXPR, offset, field_offset, 1); - if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f))) - continue; - - /* If we matched, then set offset to the displacement into - this field. */ - new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); - SET_EXPR_LOCATION (new_base, loc); - - /* Recurse to possibly find the match. */ - ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type, - f == TYPE_FIELDS (record_type)); - if (ret) - return ret; - ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t, - orig_type); - if (ret) - return ret; + node = cgraph_get_node (decl); + /* Check that we still have function body and that we didn't took + the decision to eliminate offline copy of the function yet. + The second is important when devirtualization happens during final + compilation stage when making a new reference no longer makes callee + to be compiled. */ + if (!node || !node->analyzed || node->global.inlined_to) + return false; } - - if (!tail_array_field) - return NULL_TREE; - - f = tail_array_field; - field_type = TREE_TYPE (f); - offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1); - - /* If we get here, we've got an aggregate field, and a possibly - nonzero offset into them. Recurse and hope for a valid match. 
*/ - base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE); - SET_EXPR_LOCATION (base, loc); - - t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, - f == TYPE_FIELDS (record_type)); - if (t) - return t; - return maybe_fold_offset_to_component_ref (loc, field_type, base, offset, - orig_type); -} - -/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type - or BASE[index] or by combination of those. - - LOC is the location of original expression. - - Before attempting the conversion strip off existing ADDR_EXPRs and - handled component refs. */ - -tree -maybe_fold_offset_to_reference (location_t loc, tree base, tree offset, - tree orig_type) -{ - tree ret; - tree type; - - STRIP_NOPS (base); - if (TREE_CODE (base) != ADDR_EXPR) - return NULL_TREE; - - base = TREE_OPERAND (base, 0); - - /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union, - so it needs to be removed and new COMPONENT_REF constructed. - The wrong COMPONENT_REF are often constructed by folding the - (type *)&object within the expression (type *)&object+offset */ - if (handled_component_p (base)) + else if (TREE_CODE (decl) == VAR_DECL) { - HOST_WIDE_INT sub_offset, size, maxsize; - tree newbase; - newbase = get_ref_base_and_extent (base, &sub_offset, - &size, &maxsize); - gcc_assert (newbase); - if (size == maxsize - && size != -1 - && !(sub_offset & (BITS_PER_UNIT - 1))) - { - base = newbase; - if (sub_offset) - offset = int_const_binop (PLUS_EXPR, offset, - build_int_cst (TREE_TYPE (offset), - sub_offset / BITS_PER_UNIT), 1); - } + vnode = varpool_get_node (decl); + if (!vnode || !vnode->finalized) + return false; } - if (useless_type_conversion_p (orig_type, TREE_TYPE (base)) - && integer_zerop (offset)) - return base; - type = TREE_TYPE (base); - - ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type); - if (!ret) - ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true); - - return ret; + return true; } -/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type - or &BASE[index] or by combination of those. - - LOC is the location of the original expression. - - Before attempting the conversion strip off existing component refs. */ +/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into + acceptable form for is_gimple_min_invariant. */ tree -maybe_fold_offset_to_address (location_t loc, tree addr, tree offset, - tree orig_type) +canonicalize_constructor_val (tree cval) { - tree t; - - gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr)) - && POINTER_TYPE_P (orig_type)); - - t = maybe_fold_offset_to_reference (loc, addr, offset, - TREE_TYPE (orig_type)); - if (t != NULL_TREE) + tree orig_cval = cval; + STRIP_NOPS (cval); + if (TREE_CODE (cval) == POINTER_PLUS_EXPR + && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST) { - tree orig = addr; - tree ptr_type; - - /* For __builtin_object_size to function correctly we need to - make sure not to fold address arithmetic so that we change - reference from one array to another. This would happen for - example for - - struct X { char s1[10]; char s2[10] } s; - char *foo (void) { return &s.s2[-4]; } - - where we need to avoid generating &s.s1[6]. As the C and - C++ frontends create different initial trees - (char *) &s.s1 + -4 vs. &s.s1[-4] we have to do some - sophisticated comparisons here. Note that checking for the - condition after the fact is easier than trying to avoid doing - the folding. 
*/ - STRIP_NOPS (orig); - if (TREE_CODE (orig) == ADDR_EXPR) - orig = TREE_OPERAND (orig, 0); - if ((TREE_CODE (orig) == ARRAY_REF - || (TREE_CODE (orig) == COMPONENT_REF - && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE)) - && (TREE_CODE (t) == ARRAY_REF - || TREE_CODE (t) == COMPONENT_REF) - && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF - ? TREE_OPERAND (orig, 0) : orig, - TREE_CODE (t) == ARRAY_REF - ? TREE_OPERAND (t, 0) : t, 0)) - return NULL_TREE; - - ptr_type = build_pointer_type (TREE_TYPE (t)); - if (!useless_type_conversion_p (orig_type, ptr_type)) - return NULL_TREE; - return build_fold_addr_expr_with_type_loc (loc, t, ptr_type); + tree ptr = TREE_OPERAND (cval, 0); + if (is_gimple_min_invariant (ptr)) + cval = build1_loc (EXPR_LOCATION (cval), + ADDR_EXPR, TREE_TYPE (ptr), + fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)), + ptr, + fold_convert (ptr_type_node, + TREE_OPERAND (cval, 1)))); } - - return NULL_TREE; -} - -/* A subroutine of fold_stmt. Attempt to simplify *(BASE+OFFSET). - Return the simplified expression, or NULL if nothing could be done. */ - -static tree -maybe_fold_stmt_indirect (tree expr, tree base, tree offset) -{ - tree t; - bool volatile_p = TREE_THIS_VOLATILE (expr); - location_t loc = EXPR_LOCATION (expr); - - /* We may well have constructed a double-nested PLUS_EXPR via multiple - substitutions. Fold that down to one. Remove NON_LVALUE_EXPRs that - are sometimes added. */ - base = fold (base); - STRIP_TYPE_NOPS (base); - TREE_OPERAND (expr, 0) = base; - - /* One possibility is that the address reduces to a string constant. */ - t = fold_read_from_constant_string (expr); - if (t) - return t; - - /* Add in any offset from a POINTER_PLUS_EXPR. */ - if (TREE_CODE (base) == POINTER_PLUS_EXPR) + if (TREE_CODE (cval) == ADDR_EXPR) { - tree offset2; + tree base = get_base_address (TREE_OPERAND (cval, 0)); - offset2 = TREE_OPERAND (base, 1); - if (TREE_CODE (offset2) != INTEGER_CST) + if (base + && (TREE_CODE (base) == VAR_DECL + || TREE_CODE (base) == FUNCTION_DECL) + && !can_refer_decl_in_current_unit_p (base)) return NULL_TREE; - base = TREE_OPERAND (base, 0); - - offset = fold_convert (sizetype, - int_const_binop (PLUS_EXPR, offset, offset2, 1)); - } - - if (TREE_CODE (base) == ADDR_EXPR) - { - tree base_addr = base; - - /* Strip the ADDR_EXPR. */ - base = TREE_OPERAND (base, 0); - - /* Fold away CONST_DECL to its value, if the type is scalar. */ - if (TREE_CODE (base) == CONST_DECL - && is_gimple_min_invariant (DECL_INITIAL (base))) - return DECL_INITIAL (base); - - /* If there is no offset involved simply return the folded base. */ - if (integer_zerop (offset)) - return base; - - /* Try folding *(&B+O) to B.X. */ - t = maybe_fold_offset_to_reference (loc, base_addr, offset, - TREE_TYPE (expr)); - if (t) - { - /* Preserve volatileness of the original expression. - We can end up with a plain decl here which is shared - and we shouldn't mess with its flags. */ - if (!SSA_VAR_P (t)) - TREE_THIS_VOLATILE (t) = volatile_p; - return t; - } - } - else - { - /* We can get here for out-of-range string constant accesses, - such as "_"[3]. Bail out of the entire substitution search - and arrange for the entire statement to be replaced by a - call to __builtin_trap. In all likelihood this will all be - constant-folded away, but in the meantime we can't leave with - something that get_expr_operands can't understand. 
*/ - - t = base; - STRIP_NOPS (t); - if (TREE_CODE (t) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST) + if (base && TREE_CODE (base) == VAR_DECL) { - /* FIXME: Except that this causes problems elsewhere with dead - code not being deleted, and we die in the rtl expanders - because we failed to remove some ssa_name. In the meantime, - just return zero. */ - /* FIXME2: This condition should be signaled by - fold_read_from_constant_string directly, rather than - re-checking for it here. */ - return integer_zero_node; + TREE_ADDRESSABLE (base) = 1; + if (cfun && gimple_referenced_vars (cfun)) + add_referenced_var (base); } + /* Fixup types in global initializers. */ + if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0))) + cval = build_fold_addr_expr (TREE_OPERAND (cval, 0)); - /* Try folding *(B+O) to B->X. Still an improvement. */ - if (POINTER_TYPE_P (TREE_TYPE (base))) - { - t = maybe_fold_offset_to_reference (loc, base, offset, - TREE_TYPE (expr)); - if (t) - return t; - } + if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval))) + cval = fold_convert (TREE_TYPE (orig_cval), cval); + return cval; } - - /* Otherwise we had an offset that we could not simplify. */ - return NULL_TREE; + return orig_cval; } - -/* A quaint feature extant in our address arithmetic is that there - can be hidden type changes here. The type of the result need - not be the same as the type of the input pointer. - - What we're after here is an expression of the form - (T *)(&array + const) - where array is OP0, const is OP1, RES_TYPE is T and - the cast doesn't actually exist, but is implicit in the - type of the POINTER_PLUS_EXPR. We'd like to turn this into - &array[x] - which may be able to propagate further. */ +/* If SYM is a constant variable with known value, return the value. + NULL_TREE is returned otherwise. */ tree -maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1) +get_symbol_constant_value (tree sym) { - tree ptd_type; - tree t; - - /* The first operand should be an ADDR_EXPR. */ - if (TREE_CODE (op0) != ADDR_EXPR) - return NULL_TREE; - op0 = TREE_OPERAND (op0, 0); - - /* It had better be a constant. */ - if (TREE_CODE (op1) != INTEGER_CST) + if (const_value_known_p (sym)) { - /* Or op0 should now be A[0] and the non-constant offset defined - via a multiplication by the array element size. 
*/ - if (TREE_CODE (op0) == ARRAY_REF - && integer_zerop (TREE_OPERAND (op0, 1)) - && TREE_CODE (op1) == SSA_NAME - && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1)) + tree val = DECL_INITIAL (sym); + if (val) { - gimple offset_def = SSA_NAME_DEF_STMT (op1); - if (!is_gimple_assign (offset_def)) + val = canonicalize_constructor_val (val); + if (val && is_gimple_min_invariant (val)) + return val; + else return NULL_TREE; - - if (gimple_assign_rhs_code (offset_def) == MULT_EXPR - && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST - && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), - TYPE_SIZE_UNIT (TREE_TYPE (op0)))) - return build_fold_addr_expr - (build4 (ARRAY_REF, TREE_TYPE (op0), - TREE_OPERAND (op0, 0), - gimple_assign_rhs1 (offset_def), - TREE_OPERAND (op0, 2), - TREE_OPERAND (op0, 3))); - else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0))) - && gimple_assign_rhs_code (offset_def) != MULT_EXPR) - return build_fold_addr_expr - (build4 (ARRAY_REF, TREE_TYPE (op0), - TREE_OPERAND (op0, 0), - op1, - TREE_OPERAND (op0, 2), - TREE_OPERAND (op0, 3))); } - return NULL_TREE; + /* Variables declared 'const' without an initializer + have zero as the initializer if they may not be + overridden at link or run time. */ + if (!val + && (INTEGRAL_TYPE_P (TREE_TYPE (sym)) + || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym)))) + return build_zero_cst (TREE_TYPE (sym)); } - /* If the first operand is an ARRAY_REF, expand it so that we can fold - the offset into it. */ - while (TREE_CODE (op0) == ARRAY_REF) - { - tree array_obj = TREE_OPERAND (op0, 0); - tree array_idx = TREE_OPERAND (op0, 1); - tree elt_type = TREE_TYPE (op0); - tree elt_size = TYPE_SIZE_UNIT (elt_type); - tree min_idx; - - if (TREE_CODE (array_idx) != INTEGER_CST) - break; - if (TREE_CODE (elt_size) != INTEGER_CST) - break; - - /* Un-bias the index by the min index of the array type. */ - min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj)); - if (min_idx) - { - min_idx = TYPE_MIN_VALUE (min_idx); - if (min_idx) - { - if (TREE_CODE (min_idx) != INTEGER_CST) - break; - - array_idx = fold_convert (TREE_TYPE (min_idx), array_idx); - if (!integer_zerop (min_idx)) - array_idx = int_const_binop (MINUS_EXPR, array_idx, - min_idx, 0); - } - } - - /* Convert the index to a byte offset. */ - array_idx = fold_convert (sizetype, array_idx); - array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0); - - /* Update the operands for the next round, or for folding. */ - op1 = int_const_binop (PLUS_EXPR, - array_idx, op1, 0); - op0 = array_obj; - } + return NULL_TREE; +} - ptd_type = TREE_TYPE (res_type); - /* If we want a pointer to void, reconstruct the reference from the - array element type. A pointer to that can be trivially converted - to void *. This happens as we fold (void *)(ptr p+ off). */ - if (VOID_TYPE_P (ptd_type) - && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE) - ptd_type = TREE_TYPE (TREE_TYPE (op0)); - - /* At which point we can try some of the same things as for indirects. */ - t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true); - if (!t) - t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1, - ptd_type); - if (t) - { - t = build1 (ADDR_EXPR, res_type, t); - SET_EXPR_LOCATION (t, loc); - } - return t; -} /* Subroutine of fold_stmt. 
We perform several simplifications of the memory reference tree EXPR and make sure to re-gimplify them properly @@ -735,43 +195,40 @@ static tree maybe_fold_reference (tree expr, bool is_lhs) { tree *t = &expr; - - if (TREE_CODE (expr) == ARRAY_REF - && !is_lhs) - { - tree tem = fold_read_from_constant_string (expr); - if (tem) - return tem; - } - - /* ??? We might want to open-code the relevant remaining cases - to avoid using the generic fold. */ - if (handled_component_p (*t) - && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0))) - { - tree tem = fold (*t); - if (tem != *t) - return tem; - } + tree result; + + if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR + || TREE_CODE (expr) == REALPART_EXPR + || TREE_CODE (expr) == IMAGPART_EXPR) + && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0))) + return fold_unary_loc (EXPR_LOCATION (expr), + TREE_CODE (expr), + TREE_TYPE (expr), + TREE_OPERAND (expr, 0)); + else if (TREE_CODE (expr) == BIT_FIELD_REF + && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0))) + return fold_ternary_loc (EXPR_LOCATION (expr), + TREE_CODE (expr), + TREE_TYPE (expr), + TREE_OPERAND (expr, 0), + TREE_OPERAND (expr, 1), + TREE_OPERAND (expr, 2)); while (handled_component_p (*t)) t = &TREE_OPERAND (*t, 0); - if (TREE_CODE (*t) == INDIRECT_REF) + /* Canonicalize MEM_REFs invariant address operand. Do this first + to avoid feeding non-canonical MEM_REFs elsewhere. */ + if (TREE_CODE (*t) == MEM_REF + && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0))) { - tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0), - integer_zero_node); - /* Avoid folding *"abc" = 5 into 'a' = 5. */ - if (is_lhs && tem && CONSTANT_CLASS_P (tem)) - tem = NULL_TREE; - if (!tem - && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR) - /* If we had a good reason for propagating the address here, - make sure we end up with valid gimple. See PR34989. */ - tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); - + bool volatile_p = TREE_THIS_VOLATILE (*t); + tree tem = fold_binary (MEM_REF, TREE_TYPE (*t), + TREE_OPERAND (*t, 0), + TREE_OPERAND (*t, 1)); if (tem) { + TREE_THIS_VOLATILE (tem) = volatile_p; *t = tem; tem = maybe_fold_reference (expr, is_lhs); if (tem) @@ -779,14 +236,42 @@ maybe_fold_reference (tree expr, bool is_lhs) return expr; } } - else if (!is_lhs - && DECL_P (*t)) + + if (!is_lhs + && (result = fold_const_aggregate_ref (expr)) + && is_gimple_min_invariant (result)) + return result; + + /* Fold back MEM_REFs to reference trees. */ + if (TREE_CODE (*t) == MEM_REF + && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR + && integer_zerop (TREE_OPERAND (*t, 1)) + && (TREE_THIS_VOLATILE (*t) + == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))) + && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1))) + && (TYPE_MAIN_VARIANT (TREE_TYPE (*t)) + == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1))))) + /* We have to look out here to not drop a required conversion + from the rhs to the lhs if is_lhs, but we don't have the + rhs here to verify that. Thus require strict type + compatibility. 
*/ + && types_compatible_p (TREE_TYPE (*t), + TREE_TYPE (TREE_OPERAND + (TREE_OPERAND (*t, 0), 0)))) + { + tree tem; + *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0); + tem = maybe_fold_reference (expr, is_lhs); + if (tem) + return tem; + return expr; + } + else if (TREE_CODE (*t) == TARGET_MEM_REF) { - tree tem = get_symbol_constant_value (*t); - if (tem - && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem))) + tree tem = maybe_fold_tmr (*t); + if (tem) { - *t = unshare_expr (tem); + *t = tem; tem = maybe_fold_reference (expr, is_lhs); if (tem) return tem; @@ -818,53 +303,23 @@ fold_gimple_assign (gimple_stmt_iterator *si) { tree rhs = gimple_assign_rhs1 (stmt); - /* Try to fold a conditional expression. */ - if (TREE_CODE (rhs) == COND_EXPR) - { - tree op0 = COND_EXPR_COND (rhs); - tree tem; - bool set = false; - location_t cond_loc = EXPR_LOCATION (rhs); - - if (COMPARISON_CLASS_P (op0)) - { - fold_defer_overflow_warnings (); - tem = fold_binary_loc (cond_loc, - TREE_CODE (op0), TREE_TYPE (op0), - TREE_OPERAND (op0, 0), - TREE_OPERAND (op0, 1)); - /* This is actually a conditional expression, not a GIMPLE - conditional statement, however, the valid_gimple_rhs_p - test still applies. */ - set = (tem && is_gimple_condexpr (tem) - && valid_gimple_rhs_p (tem)); - fold_undefer_overflow_warnings (set, stmt, 0); - } - else if (is_gimple_min_invariant (op0)) - { - tem = op0; - set = true; - } - else - return NULL_TREE; - - if (set) - result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem, - COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs)); - } - - else if (TREE_CODE (rhs) == TARGET_MEM_REF) - return maybe_fold_tmr (rhs); - - else if (REFERENCE_CLASS_P (rhs)) + if (REFERENCE_CLASS_P (rhs)) return maybe_fold_reference (rhs, false); else if (TREE_CODE (rhs) == ADDR_EXPR) { - tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true); - if (tem) + tree ref = TREE_OPERAND (rhs, 0); + tree tem = maybe_fold_reference (ref, true); + if (tem + && TREE_CODE (tem) == MEM_REF + && integer_zerop (TREE_OPERAND (tem, 1))) + result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0)); + else if (tem) result = fold_convert (TREE_TYPE (rhs), build_fold_addr_expr_loc (loc, tem)); + else if (TREE_CODE (ref) == MEM_REF + && integer_zerop (TREE_OPERAND (ref, 1))) + result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0)); } else if (TREE_CODE (rhs) == CONSTRUCTOR @@ -927,60 +382,116 @@ fold_gimple_assign (gimple_stmt_iterator *si) if (valid_gimple_rhs_p (result)) return result; } - else if (CONVERT_EXPR_CODE_P (subcode) - && POINTER_TYPE_P (gimple_expr_type (stmt)) - && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt)))) - { - tree type = gimple_expr_type (stmt); - tree t = maybe_fold_offset_to_address (loc, - gimple_assign_rhs1 (stmt), - integer_zero_node, type); - if (t) - return t; - } } break; case GIMPLE_BINARY_RHS: - /* Try to fold pointer addition. */ - if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR) - { - tree type = TREE_TYPE (gimple_assign_rhs1 (stmt)); - if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE) + /* Try to canonicalize for boolean-typed X the comparisons + X == 0, X == 1, X != 0, and X != 1. */ + if (gimple_assign_rhs_code (stmt) == EQ_EXPR + || gimple_assign_rhs_code (stmt) == NE_EXPR) + { + tree lhs = gimple_assign_lhs (stmt); + tree op1 = gimple_assign_rhs1 (stmt); + tree op2 = gimple_assign_rhs2 (stmt); + tree type = TREE_TYPE (op1); + + /* Check whether the comparison operands are of the same boolean + type as the result type is. 
+ Check that second operand is an integer-constant with value + one or zero. */ + if (TREE_CODE (op2) == INTEGER_CST + && (integer_zerop (op2) || integer_onep (op2)) + && useless_type_conversion_p (TREE_TYPE (lhs), type)) { - type = build_pointer_type (TREE_TYPE (TREE_TYPE (type))); - if (!useless_type_conversion_p - (TREE_TYPE (gimple_assign_lhs (stmt)), type)) - type = TREE_TYPE (gimple_assign_rhs1 (stmt)); + enum tree_code cmp_code = gimple_assign_rhs_code (stmt); + bool is_logical_not = false; + + /* X == 0 and X != 1 is a logical-not.of X + X == 1 and X != 0 is X */ + if ((cmp_code == EQ_EXPR && integer_zerop (op2)) + || (cmp_code == NE_EXPR && integer_onep (op2))) + is_logical_not = true; + + if (is_logical_not == false) + result = op1; + /* Only for one-bit precision typed X the transformation + !X -> ~X is valied. */ + else if (TYPE_PRECISION (type) == 1) + result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR, + type, op1); + /* Otherwise we use !X -> X ^ 1. */ + else + result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR, + type, op1, build_int_cst (type, 1)); + } - result = maybe_fold_stmt_addition (gimple_location (stmt), - type, - gimple_assign_rhs1 (stmt), - gimple_assign_rhs2 (stmt)); } if (!result) result = fold_binary_loc (loc, subcode, - TREE_TYPE (gimple_assign_lhs (stmt)), - gimple_assign_rhs1 (stmt), - gimple_assign_rhs2 (stmt)); + TREE_TYPE (gimple_assign_lhs (stmt)), + gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt)); if (result) { STRIP_USELESS_TYPE_CONVERSION (result); if (valid_gimple_rhs_p (result)) return result; + } + break; + + case GIMPLE_TERNARY_RHS: + /* Try to fold a conditional expression. */ + if (gimple_assign_rhs_code (stmt) == COND_EXPR) + { + tree op0 = gimple_assign_rhs1 (stmt); + tree tem; + bool set = false; + location_t cond_loc = gimple_location (stmt); + + if (COMPARISON_CLASS_P (op0)) + { + fold_defer_overflow_warnings (); + tem = fold_binary_loc (cond_loc, + TREE_CODE (op0), TREE_TYPE (op0), + TREE_OPERAND (op0, 0), + TREE_OPERAND (op0, 1)); + /* This is actually a conditional expression, not a GIMPLE + conditional statement, however, the valid_gimple_rhs_p + test still applies. */ + set = (tem && is_gimple_condexpr (tem) + && valid_gimple_rhs_p (tem)); + fold_undefer_overflow_warnings (set, stmt, 0); + } + else if (is_gimple_min_invariant (op0)) + { + tem = op0; + set = true; + } + else + return NULL_TREE; + + if (set) + result = fold_build3_loc (cond_loc, COND_EXPR, + TREE_TYPE (gimple_assign_lhs (stmt)), tem, + gimple_assign_rhs2 (stmt), + gimple_assign_rhs3 (stmt)); + } + + if (!result) + result = fold_ternary_loc (loc, subcode, + TREE_TYPE (gimple_assign_lhs (stmt)), + gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt), + gimple_assign_rhs3 (stmt)); - /* Fold might have produced non-GIMPLE, so if we trust it blindly - we lose canonicalization opportunities. Do not go again - through fold here though, or the same non-GIMPLE will be - produced. */ - if (commutative_tree_code (subcode) - && tree_swap_operands_p (gimple_assign_rhs1 (stmt), - gimple_assign_rhs2 (stmt), false)) - return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)), - gimple_assign_rhs2 (stmt), - gimple_assign_rhs1 (stmt)); + if (result) + { + STRIP_USELESS_TYPE_CONVERSION (result); + if (valid_gimple_rhs_p (result)) + return result; } break; @@ -1024,72 +535,128 @@ fold_gimple_cond (gimple stmt) is replaced. If the call is expected to produces a result, then it is replaced by an assignment of the new RHS to the result variable. 
If the result is to be ignored, then the call is replaced by a - GIMPLE_NOP. */ + GIMPLE_NOP. A proper VDEF chain is retained by making the first + VUSE and the last VDEF of the whole sequence be the same as the replaced + statement and using new SSA names for stores in between. */ void gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr) { tree lhs; - tree tmp = NULL_TREE; /* Silence warning. */ gimple stmt, new_stmt; gimple_stmt_iterator i; gimple_seq stmts = gimple_seq_alloc(); struct gimplify_ctx gctx; - gimple last = NULL; + gimple last; + gimple laststore; + tree reaching_vuse; stmt = gsi_stmt (*si_p); gcc_assert (is_gimple_call (stmt)); - lhs = gimple_call_lhs (stmt); - push_gimplify_context (&gctx); + gctx.into_ssa = gimple_in_ssa_p (cfun); + lhs = gimple_call_lhs (stmt); if (lhs == NULL_TREE) - gimplify_and_add (expr, &stmts); + { + gimplify_and_add (expr, &stmts); + /* We can end up with folding a memcpy of an empty class assignment + which gets optimized away by C++ gimplification. */ + if (gimple_seq_empty_p (stmts)) + { + pop_gimplify_context (NULL); + if (gimple_in_ssa_p (cfun)) + { + unlink_stmt_vdef (stmt); + release_defs (stmt); + } + gsi_replace (si_p, gimple_build_nop (), true); + return; + } + } else - tmp = get_initialized_tmp_var (expr, &stmts, NULL); + { + tree tmp = get_initialized_tmp_var (expr, &stmts, NULL); + new_stmt = gimple_build_assign (lhs, tmp); + i = gsi_last (stmts); + gsi_insert_after_without_update (&i, new_stmt, + GSI_CONTINUE_LINKING); + } pop_gimplify_context (NULL); if (gimple_has_location (stmt)) annotate_all_with_location (stmts, gimple_location (stmt)); - /* The replacement can expose previously unreferenced variables. */ + /* First iterate over the replacement statements backward, assigning + virtual operands to their defining statements. */ + laststore = NULL; + for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i)) + { + new_stmt = gsi_stmt (i); + if ((gimple_assign_single_p (new_stmt) + && !is_gimple_reg (gimple_assign_lhs (new_stmt))) + || (is_gimple_call (new_stmt) + && (gimple_call_flags (new_stmt) + & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0)) + { + tree vdef; + if (!laststore) + vdef = gimple_vdef (stmt); + else + vdef = make_ssa_name (gimple_vop (cfun), new_stmt); + gimple_set_vdef (new_stmt, vdef); + if (vdef && TREE_CODE (vdef) == SSA_NAME) + SSA_NAME_DEF_STMT (vdef) = new_stmt; + laststore = new_stmt; + } + } + + /* Second iterate over the statements forward, assigning virtual + operands to their uses. */ + last = NULL; + reaching_vuse = gimple_vuse (stmt); for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i)) { + /* Do not insert the last stmt in this loop but remember it + for replacing the original statement. */ if (last) { gsi_insert_before (si_p, last, GSI_NEW_STMT); gsi_next (si_p); } new_stmt = gsi_stmt (i); - find_new_referenced_vars (new_stmt); - mark_symbols_for_renaming (new_stmt); + /* The replacement can expose previously unreferenced variables. */ + if (gimple_in_ssa_p (cfun)) + find_new_referenced_vars (new_stmt); + /* If the new statement possibly has a VUSE, update it with exact SSA + name we know will reach this one. 
*/ + if (gimple_has_mem_ops (new_stmt)) + gimple_set_vuse (new_stmt, reaching_vuse); + gimple_set_modified (new_stmt, true); + if (gimple_vdef (new_stmt)) + reaching_vuse = gimple_vdef (new_stmt); last = new_stmt; } - if (lhs == NULL_TREE) - { - unlink_stmt_vdef (stmt); - release_defs (stmt); - new_stmt = last; - } - else + /* If the new sequence does not do a store release the virtual + definition of the original statement. */ + if (reaching_vuse + && reaching_vuse == gimple_vuse (stmt)) { - if (last) + tree vdef = gimple_vdef (stmt); + if (vdef + && TREE_CODE (vdef) == SSA_NAME) { - gsi_insert_before (si_p, last, GSI_NEW_STMT); - gsi_next (si_p); + unlink_stmt_vdef (stmt); + release_ssa_name (vdef); } - new_stmt = gimple_build_assign (lhs, tmp); - gimple_set_vuse (new_stmt, gimple_vuse (stmt)); - gimple_set_vdef (new_stmt, gimple_vdef (stmt)); - move_ssa_defining_stmt_for_defs (new_stmt, stmt); } - gimple_set_location (new_stmt, gimple_location (stmt)); - gsi_replace (si_p, new_stmt, false); + /* Finally replace rhe original statement with the last. */ + gsi_replace (si_p, last, false); } /* Return the string length, maximum string length or maximum value of @@ -1109,13 +676,10 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) if (TREE_CODE (arg) != SSA_NAME) { - if (TREE_CODE (arg) == COND_EXPR) - return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type) - && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type); /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */ - else if (TREE_CODE (arg) == ADDR_EXPR - && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF - && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1))) + if (TREE_CODE (arg) == ADDR_EXPR + && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF + && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1))) { tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0); if (TREE_CODE (aop0) == INDIRECT_REF @@ -1157,9 +721,8 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) } /* If we were already here, break the infinite cycle. */ - if (bitmap_bit_p (visited, SSA_NAME_VERSION (arg))) + if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg))) return true; - bitmap_set_bit (visited, SSA_NAME_VERSION (arg)); var = arg; def_stmt = SSA_NAME_DEF_STMT (var); @@ -1176,6 +739,13 @@ get_maxval_strlen (tree arg, tree *length, bitmap visited, int type) tree rhs = gimple_assign_rhs1 (def_stmt); return get_maxval_strlen (rhs, length, visited, type); } + else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR) + { + tree op2 = gimple_assign_rhs2 (def_stmt); + tree op3 = gimple_assign_rhs3 (def_stmt); + return get_maxval_strlen (op2, length, visited, type) + && get_maxval_strlen (op3, length, visited, type); + } return false; case GIMPLE_PHI: @@ -1246,6 +816,11 @@ gimple_fold_builtin (gimple stmt) if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD) return NULL_TREE; + /* Give up for always_inline inline builtins until they are + inlined. */ + if (avoid_folding_inline_builtin (callee)) + return NULL_TREE; + /* If the builtin could not be folded, and it has no argument list, we're done. 
*/ nargs = gimple_call_num_args (stmt); @@ -1271,6 +846,7 @@ gimple_fold_builtin (gimple stmt) case BUILT_IN_MEMMOVE_CHK: case BUILT_IN_MEMSET_CHK: case BUILT_IN_STRNCPY_CHK: + case BUILT_IN_STPNCPY_CHK: arg_idx = 2; type = 2; break; @@ -1312,9 +888,9 @@ gimple_fold_builtin (gimple stmt) fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]); /* If the result is not a valid gimple value, or not a cast - of a valid gimple value, then we can not use the result. */ + of a valid gimple value, then we cannot use the result. */ if (is_gimple_val (new_val) - || (is_gimple_cast (new_val) + || (CONVERT_EXPR_P (new_val) && is_gimple_val (TREE_OPERAND (new_val, 0)))) return new_val; } @@ -1377,12 +953,14 @@ gimple_fold_builtin (gimple stmt) break; case BUILT_IN_STRNCPY_CHK: + case BUILT_IN_STPNCPY_CHK: if (val[2] && is_gimple_val (val[2]) && nargs == 4) - result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0), + result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0), gimple_call_arg (stmt, 1), gimple_call_arg (stmt, 2), gimple_call_arg (stmt, 3), - val[2]); + val[2], ignore, + DECL_FUNCTION_CODE (callee)); break; case BUILT_IN_SNPRINTF_CHK: @@ -1401,135 +979,96 @@ gimple_fold_builtin (gimple stmt) return result; } -/* Search for a base binfo of BINFO that corresponds to TYPE and return it if - it is found or NULL_TREE if it is not. */ +/* Generate code adjusting the first parameter of a call statement determined + by GSI by DELTA. */ -static tree -get_base_binfo_for_type (tree binfo, tree type) +void +gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta) { - int i; - tree base_binfo; - tree res = NULL_TREE; - - for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++) - if (TREE_TYPE (base_binfo) == type) - { - gcc_assert (!res); - res = base_binfo; - } - - return res; + gimple call_stmt = gsi_stmt (*gsi); + tree parm, tmp; + gimple new_stmt; + + delta = convert_to_ptrofftype (delta); + gcc_assert (gimple_call_num_args (call_stmt) >= 1); + parm = gimple_call_arg (call_stmt, 0); + gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm))); + tmp = create_tmp_var (TREE_TYPE (parm), NULL); + add_referenced_var (tmp); + + tmp = make_ssa_name (tmp, NULL); + new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta); + SSA_NAME_DEF_STMT (tmp) = new_stmt; + gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT); + gimple_call_set_arg (call_stmt, 0, tmp); } -/* Return a binfo describing the part of object referenced by expression REF. - Return NULL_TREE if it cannot be determined. REF can consist of a series of - COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just - a simple declaration, indirect reference or an SSA_NAME. If the function - discovers an INDIRECT_REF or an SSA_NAME, it will assume that the - encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE. - Otherwise the first non-artificial field declaration or the base declaration - will be examined to get the encapsulating type. */ +/* Return a binfo to be used for devirtualization of calls based on an object + represented by a declaration (i.e. a global or automatically allocated one) + or NULL if it cannot be found or is not safe. CST is expected to be an + ADDR_EXPR of such object or the function will return NULL. Currently it is + safe to use such binfo only if it has no base binfo (i.e. no ancestors). 
*/ tree -gimple_get_relevant_ref_binfo (tree ref, tree known_binfo) +gimple_extract_devirt_binfo_from_cst (tree cst) { - while (true) - { - if (TREE_CODE (ref) == COMPONENT_REF) - { - tree par_type; - tree binfo, base_binfo; - tree field = TREE_OPERAND (ref, 1); - - if (!DECL_ARTIFICIAL (field)) - { - tree type = TREE_TYPE (field); - if (TREE_CODE (type) == RECORD_TYPE) - return TYPE_BINFO (type); - else - return NULL_TREE; - } - - par_type = TREE_TYPE (TREE_OPERAND (ref, 0)); - binfo = TYPE_BINFO (par_type); - if (!binfo - || BINFO_N_BASE_BINFOS (binfo) == 0) - return NULL_TREE; - - base_binfo = BINFO_BASE_BINFO (binfo, 0); - if (BINFO_TYPE (base_binfo) != TREE_TYPE (field)) - { - tree d_binfo; - - d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0), - known_binfo); - /* Get descendant binfo. */ - if (!d_binfo) - return NULL_TREE; - return get_base_binfo_for_type (d_binfo, TREE_TYPE (field)); - } - - ref = TREE_OPERAND (ref, 0); - } - else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE) - return TYPE_BINFO (TREE_TYPE (ref)); - else if (known_binfo - && (TREE_CODE (ref) == SSA_NAME - || TREE_CODE (ref) == INDIRECT_REF)) - return known_binfo; - else - return NULL_TREE; - } -} + HOST_WIDE_INT offset, size, max_size; + tree base, type, expected_type, binfo; + bool last_artificial = false; -/* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is - integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO - carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */ + if (!flag_devirtualize + || TREE_CODE (cst) != ADDR_EXPR + || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE) + return NULL_TREE; -tree -gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo) -{ - HOST_WIDE_INT i; - tree v, fndecl; + cst = TREE_OPERAND (cst, 0); + expected_type = TREE_TYPE (cst); + base = get_ref_base_and_extent (cst, &offset, &size, &max_size); + type = TREE_TYPE (base); + if (!DECL_P (base) + || max_size == -1 + || max_size != size + || TREE_CODE (type) != RECORD_TYPE) + return NULL_TREE; - v = BINFO_VIRTUALS (known_binfo); - i = 0; - while (i != token) + /* Find the sub-object the constant actually refers to and mark whether it is + an artificial one (as opposed to a user-defined one). */ + while (true) { - i += (TARGET_VTABLE_USES_DESCRIPTORS - ? TARGET_VTABLE_USES_DESCRIPTORS : 1); - v = TREE_CHAIN (v); - } - - fndecl = TREE_VALUE (v); - return build_fold_addr_expr (fndecl); -} + HOST_WIDE_INT pos, size; + tree fld; + if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type)) + break; + if (offset < 0) + return NULL_TREE; -/* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE - is not NULL_TREE, it is the true type of the outmost encapsulating object if - that comes from a pointer SSA_NAME. If the true outmost encapsulating type - can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used - regardless of KNOWN_TYPE (which thus can be NULL_TREE). */ - -tree -gimple_fold_obj_type_ref (tree ref, tree known_type) -{ - tree obj = OBJ_TYPE_REF_OBJECT (ref); - tree known_binfo = known_type ? 
TYPE_BINFO (known_type) : NULL_TREE; - tree binfo; + for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld)) + { + if (TREE_CODE (fld) != FIELD_DECL) + continue; - if (TREE_CODE (obj) == ADDR_EXPR) - obj = TREE_OPERAND (obj, 0); + pos = int_bit_position (fld); + size = tree_low_cst (DECL_SIZE (fld), 1); + if (pos <= offset && (pos + size) > offset) + break; + } + if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE) + return NULL_TREE; - binfo = gimple_get_relevant_ref_binfo (obj, known_binfo); - if (binfo) - { - HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1); - return gimple_fold_obj_type_ref_known_binfo (token, binfo); + last_artificial = DECL_ARTIFICIAL (fld); + type = TREE_TYPE (fld); + offset -= pos; } - else + /* Artifical sub-objects are ancestors, we do not want to use them for + devirtualization, at least not here. */ + if (last_artificial) return NULL_TREE; + binfo = TYPE_BINFO (type); + if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0) + return NULL_TREE; + else + return binfo; } /* Attempt to fold a call statement referenced by the statement iterator GSI. @@ -1538,48 +1077,70 @@ gimple_fold_obj_type_ref (tree ref, tree known_type) It is assumed that the operands have been previously folded. */ static bool -fold_gimple_call (gimple_stmt_iterator *gsi) +gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace) { gimple stmt = gsi_stmt (*gsi); + tree callee; + bool changed = false; + unsigned i; + + /* Fold *& in call arguments. */ + for (i = 0; i < gimple_call_num_args (stmt); ++i) + if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i))) + { + tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false); + if (tmp) + { + gimple_call_set_arg (stmt, i, tmp); + changed = true; + } + } + + /* Check for virtual calls that became direct calls. */ + callee = gimple_call_fn (stmt); + if (callee && TREE_CODE (callee) == OBJ_TYPE_REF) + { + if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE) + { + gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee)); + changed = true; + } + else + { + tree obj = OBJ_TYPE_REF_OBJECT (callee); + tree binfo = gimple_extract_devirt_binfo_from_cst (obj); + if (binfo) + { + HOST_WIDE_INT token + = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee)); + tree fndecl = gimple_get_virt_method_for_binfo (token, binfo); + if (fndecl) + { + gimple_call_set_fndecl (stmt, fndecl); + changed = true; + } + } + } + } - tree callee = gimple_call_fndecl (stmt); + if (inplace) + return changed; /* Check for builtins that CCP can handle using information not available in the generic fold routines. */ + callee = gimple_call_fndecl (stmt); if (callee && DECL_BUILT_IN (callee)) { tree result = gimple_fold_builtin (stmt); - if (result) { if (!update_call_from_tree (gsi, result)) gimplify_and_update_call_from_tree (gsi, result); - return true; + changed = true; } } - else - { - /* ??? Should perhaps do this in fold proper. However, doing it - there requires that we create a new CALL_EXPR, and that requires - copying EH region info to the new node. Easier to just do it - here where we can just smash the call operand. */ - /* ??? Is there a good reason not to do this in fold_stmt_inplace? */ - callee = gimple_call_fn (stmt); - if (TREE_CODE (callee) == OBJ_TYPE_REF - && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR) - { - tree t; - - t = gimple_fold_obj_type_ref (callee, NULL_TREE); - if (t) - { - gimple_call_set_fn (stmt, t); - return true; - } - } - } - return false; + return changed; } /* Worker for both fold_stmt and fold_stmt_inplace. 
The INPLACE argument @@ -1591,6 +1152,11 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace) bool changed = false; gimple stmt = gsi_stmt (*gsi); unsigned i; + gimple_stmt_iterator gsinext = *gsi; + gimple next_stmt; + + gsi_next (&gsinext); + next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext); /* Fold the main computation performed by the statement. */ switch (gimple_code (stmt)) @@ -1598,8 +1164,22 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace) case GIMPLE_ASSIGN: { unsigned old_num_ops = gimple_num_ops (stmt); - tree new_rhs = fold_gimple_assign (gsi); + enum tree_code subcode = gimple_assign_rhs_code (stmt); tree lhs = gimple_assign_lhs (stmt); + tree new_rhs; + /* First canonicalize operand order. This avoids building new + trees if this is the only thing fold would later do. */ + if ((commutative_tree_code (subcode) + || commutative_ternary_tree_code (subcode)) + && tree_swap_operands_p (gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt), false)) + { + tree tem = gimple_assign_rhs1 (stmt); + gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt)); + gimple_assign_set_rhs2 (stmt, tem); + changed = true; + } + new_rhs = fold_gimple_assign (gsi); if (new_rhs && !useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_rhs))) @@ -1619,44 +1199,77 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace) break; case GIMPLE_CALL: - /* Fold *& in call arguments. */ - for (i = 0; i < gimple_call_num_args (stmt); ++i) - if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i))) + changed |= gimple_fold_call (gsi, inplace); + break; + + case GIMPLE_ASM: + /* Fold *& in asm operands. */ + { + size_t noutputs; + const char **oconstraints; + const char *constraint; + bool allows_mem, allows_reg; + + noutputs = gimple_asm_noutputs (stmt); + oconstraints = XALLOCAVEC (const char *, noutputs); + + for (i = 0; i < gimple_asm_noutputs (stmt); ++i) { - tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false); - if (tmp) + tree link = gimple_asm_output_op (stmt, i); + tree op = TREE_VALUE (link); + oconstraints[i] + = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); + if (REFERENCE_CLASS_P (op) + && (op = maybe_fold_reference (op, true)) != NULL_TREE) { - gimple_call_set_arg (stmt, i, tmp); + TREE_VALUE (link) = op; changed = true; } } - /* The entire statement may be replaced in this case. */ - if (!inplace) - changed |= fold_gimple_call (gsi); + for (i = 0; i < gimple_asm_ninputs (stmt); ++i) + { + tree link = gimple_asm_input_op (stmt, i); + tree op = TREE_VALUE (link); + constraint + = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link))); + parse_input_constraint (&constraint, 0, 0, noutputs, 0, + oconstraints, &allows_mem, &allows_reg); + if (REFERENCE_CLASS_P (op) + && (op = maybe_fold_reference (op, !allows_reg && allows_mem)) + != NULL_TREE) + { + TREE_VALUE (link) = op; + changed = true; + } + } + } break; - case GIMPLE_ASM: - /* Fold *& in asm operands. 
*/ - for (i = 0; i < gimple_asm_noutputs (stmt); ++i) + case GIMPLE_DEBUG: + if (gimple_debug_bind_p (stmt)) { - tree link = gimple_asm_output_op (stmt, i); - tree op = TREE_VALUE (link); - if (REFERENCE_CLASS_P (op) - && (op = maybe_fold_reference (op, true)) != NULL_TREE) + tree val = gimple_debug_bind_get_value (stmt); + if (val + && REFERENCE_CLASS_P (val)) { - TREE_VALUE (link) = op; - changed = true; + tree tem = maybe_fold_reference (val, false); + if (tem) + { + gimple_debug_bind_set_value (stmt, tem); + changed = true; + } } - } - for (i = 0; i < gimple_asm_ninputs (stmt); ++i) - { - tree link = gimple_asm_input_op (stmt, i); - tree op = TREE_VALUE (link); - if (REFERENCE_CLASS_P (op) - && (op = maybe_fold_reference (op, false)) != NULL_TREE) + else if (val + && TREE_CODE (val) == ADDR_EXPR) { - TREE_VALUE (link) = op; - changed = true; + tree ref = TREE_OPERAND (val, 0); + tree tem = maybe_fold_reference (ref, false); + if (tem) + { + tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val)); + gimple_debug_bind_set_value (stmt, tem); + changed = true; + } } } break; @@ -1664,10 +1277,19 @@ fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace) default:; } + /* If stmt folds into nothing and it was the last stmt in a bb, + don't call gsi_stmt. */ + if (gsi_end_p (*gsi)) + { + gcc_assert (next_stmt == NULL); + return changed; + } + stmt = gsi_stmt (*gsi); - /* Fold *& on the lhs. */ - if (gimple_has_lhs (stmt)) + /* Fold *& on the lhs. Don't do this if stmt folded into nothing, + as we'd changing the next stmt. */ + if (gimple_has_lhs (stmt) && stmt != next_stmt) { tree lhs = gimple_get_lhs (stmt); if (lhs && REFERENCE_CLASS_P (lhs)) @@ -1697,20 +1319,1945 @@ fold_stmt (gimple_stmt_iterator *gsi) return fold_stmt_1 (gsi, false); } -/* Perform the minimal folding on statement STMT. Only operations like +/* Perform the minimal folding on statement *GSI. Only operations like *&x created by constant propagation are handled. The statement cannot be replaced with a new one. Return true if the statement was changed, false otherwise. - The statement STMT should be in valid gimple form but may + The statement *GSI should be in valid gimple form but may be in unfolded state as resulting from for example constant propagation which can produce *&x = 0. */ bool -fold_stmt_inplace (gimple stmt) +fold_stmt_inplace (gimple_stmt_iterator *gsi) { - gimple_stmt_iterator gsi = gsi_for_stmt (stmt); - bool changed = fold_stmt_1 (&gsi, true); - gcc_assert (gsi_stmt (gsi) == stmt); + gimple stmt = gsi_stmt (*gsi); + bool changed = fold_stmt_1 (gsi, true); + gcc_assert (gsi_stmt (*gsi) == stmt); return changed; } +/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE + if EXPR is null or we don't know how. + If non-null, the result always has boolean type. 
*/ + +static tree +canonicalize_bool (tree expr, bool invert) +{ + if (!expr) + return NULL_TREE; + else if (invert) + { + if (integer_nonzerop (expr)) + return boolean_false_node; + else if (integer_zerop (expr)) + return boolean_true_node; + else if (TREE_CODE (expr) == SSA_NAME) + return fold_build2 (EQ_EXPR, boolean_type_node, expr, + build_int_cst (TREE_TYPE (expr), 0)); + else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison) + return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false), + boolean_type_node, + TREE_OPERAND (expr, 0), + TREE_OPERAND (expr, 1)); + else + return NULL_TREE; + } + else + { + if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE) + return expr; + if (integer_nonzerop (expr)) + return boolean_true_node; + else if (integer_zerop (expr)) + return boolean_false_node; + else if (TREE_CODE (expr) == SSA_NAME) + return fold_build2 (NE_EXPR, boolean_type_node, expr, + build_int_cst (TREE_TYPE (expr), 0)); + else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison) + return fold_build2 (TREE_CODE (expr), + boolean_type_node, + TREE_OPERAND (expr, 0), + TREE_OPERAND (expr, 1)); + else + return NULL_TREE; + } +} + +/* Check to see if a boolean expression EXPR is logically equivalent to the + comparison (OP1 CODE OP2). Check for various identities involving + SSA_NAMEs. */ + +static bool +same_bool_comparison_p (const_tree expr, enum tree_code code, + const_tree op1, const_tree op2) +{ + gimple s; + + /* The obvious case. */ + if (TREE_CODE (expr) == code + && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0) + && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0)) + return true; + + /* Check for comparing (name, name != 0) and the case where expr + is an SSA_NAME with a definition matching the comparison. */ + if (TREE_CODE (expr) == SSA_NAME + && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE) + { + if (operand_equal_p (expr, op1, 0)) + return ((code == NE_EXPR && integer_zerop (op2)) + || (code == EQ_EXPR && integer_nonzerop (op2))); + s = SSA_NAME_DEF_STMT (expr); + if (is_gimple_assign (s) + && gimple_assign_rhs_code (s) == code + && operand_equal_p (gimple_assign_rhs1 (s), op1, 0) + && operand_equal_p (gimple_assign_rhs2 (s), op2, 0)) + return true; + } + + /* If op1 is of the form (name != 0) or (name == 0), and the definition + of name is a comparison, recurse. */ + if (TREE_CODE (op1) == SSA_NAME + && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE) + { + s = SSA_NAME_DEF_STMT (op1); + if (is_gimple_assign (s) + && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison) + { + enum tree_code c = gimple_assign_rhs_code (s); + if ((c == NE_EXPR && integer_zerop (op2)) + || (c == EQ_EXPR && integer_nonzerop (op2))) + return same_bool_comparison_p (expr, c, + gimple_assign_rhs1 (s), + gimple_assign_rhs2 (s)); + if ((c == EQ_EXPR && integer_zerop (op2)) + || (c == NE_EXPR && integer_nonzerop (op2))) + return same_bool_comparison_p (expr, + invert_tree_comparison (c, false), + gimple_assign_rhs1 (s), + gimple_assign_rhs2 (s)); + } + } + return false; +} + +/* Check to see if two boolean expressions OP1 and OP2 are logically + equivalent. */ + +static bool +same_bool_result_p (const_tree op1, const_tree op2) +{ + /* Simple cases first. */ + if (operand_equal_p (op1, op2, 0)) + return true; + + /* Check the cases where at least one of the operands is a comparison. + These are a bit smarter than operand_equal_p in that they apply some + identifies on SSA_NAMEs. 
*/ + if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison + && same_bool_comparison_p (op1, TREE_CODE (op2), + TREE_OPERAND (op2, 0), + TREE_OPERAND (op2, 1))) + return true; + if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison + && same_bool_comparison_p (op2, TREE_CODE (op1), + TREE_OPERAND (op1, 0), + TREE_OPERAND (op1, 1))) + return true; + + /* Default case. */ + return false; +} + +/* Forward declarations for some mutually recursive functions. */ + +static tree +and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, + enum tree_code code2, tree op2a, tree op2b); +static tree +and_var_with_comparison (tree var, bool invert, + enum tree_code code2, tree op2a, tree op2b); +static tree +and_var_with_comparison_1 (gimple stmt, + enum tree_code code2, tree op2a, tree op2b); +static tree +or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, + enum tree_code code2, tree op2a, tree op2b); +static tree +or_var_with_comparison (tree var, bool invert, + enum tree_code code2, tree op2a, tree op2b); +static tree +or_var_with_comparison_1 (gimple stmt, + enum tree_code code2, tree op2a, tree op2b); + +/* Helper function for and_comparisons_1: try to simplify the AND of the + ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B). + If INVERT is true, invert the value of the VAR before doing the AND. + Return NULL_EXPR if we can't simplify this to a single expression. */ + +static tree +and_var_with_comparison (tree var, bool invert, + enum tree_code code2, tree op2a, tree op2b) +{ + tree t; + gimple stmt = SSA_NAME_DEF_STMT (var); + + /* We can only deal with variables whose definitions are assignments. */ + if (!is_gimple_assign (stmt)) + return NULL_TREE; + + /* If we have an inverted comparison, apply DeMorgan's law and rewrite + !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b)) + Then we only have to consider the simpler non-inverted cases. */ + if (invert) + t = or_var_with_comparison_1 (stmt, + invert_tree_comparison (code2, false), + op2a, op2b); + else + t = and_var_with_comparison_1 (stmt, code2, op2a, op2b); + return canonicalize_bool (t, invert); +} + +/* Try to simplify the AND of the ssa variable defined by the assignment + STMT with the comparison specified by (OP2A CODE2 OP2B). + Return NULL_EXPR if we can't simplify this to a single expression. */ + +static tree +and_var_with_comparison_1 (gimple stmt, + enum tree_code code2, tree op2a, tree op2b) +{ + tree var = gimple_assign_lhs (stmt); + tree true_test_var = NULL_TREE; + tree false_test_var = NULL_TREE; + enum tree_code innercode = gimple_assign_rhs_code (stmt); + + /* Check for identities like (var AND (var == 0)) => false. */ + if (TREE_CODE (op2a) == SSA_NAME + && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE) + { + if ((code2 == NE_EXPR && integer_zerop (op2b)) + || (code2 == EQ_EXPR && integer_nonzerop (op2b))) + { + true_test_var = op2a; + if (var == true_test_var) + return var; + } + else if ((code2 == EQ_EXPR && integer_zerop (op2b)) + || (code2 == NE_EXPR && integer_nonzerop (op2b))) + { + false_test_var = op2a; + if (var == false_test_var) + return boolean_false_node; + } + } + + /* If the definition is a comparison, recurse on it. */ + if (TREE_CODE_CLASS (innercode) == tcc_comparison) + { + tree t = and_comparisons_1 (innercode, + gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt), + code2, + op2a, + op2b); + if (t) + return t; + } + + /* If the definition is an AND or OR expression, we may be able to + simplify by reassociating. 
*/ + if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE + && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) + { + tree inner1 = gimple_assign_rhs1 (stmt); + tree inner2 = gimple_assign_rhs2 (stmt); + gimple s; + tree t; + tree partial = NULL_TREE; + bool is_and = (innercode == BIT_AND_EXPR); + + /* Check for boolean identities that don't require recursive examination + of inner1/inner2: + inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var + inner1 AND (inner1 OR inner2) => inner1 + !inner1 AND (inner1 AND inner2) => false + !inner1 AND (inner1 OR inner2) => !inner1 AND inner2 + Likewise for similar cases involving inner2. */ + if (inner1 == true_test_var) + return (is_and ? var : inner1); + else if (inner2 == true_test_var) + return (is_and ? var : inner2); + else if (inner1 == false_test_var) + return (is_and + ? boolean_false_node + : and_var_with_comparison (inner2, false, code2, op2a, op2b)); + else if (inner2 == false_test_var) + return (is_and + ? boolean_false_node + : and_var_with_comparison (inner1, false, code2, op2a, op2b)); + + /* Next, redistribute/reassociate the AND across the inner tests. + Compute the first partial result, (inner1 AND (op2a code op2b)) */ + if (TREE_CODE (inner1) == SSA_NAME + && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1)) + && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison + && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s), + gimple_assign_rhs1 (s), + gimple_assign_rhs2 (s), + code2, op2a, op2b))) + { + /* Handle the AND case, where we are reassociating: + (inner1 AND inner2) AND (op2a code2 op2b) + => (t AND inner2) + If the partial result t is a constant, we win. Otherwise + continue on to try reassociating with the other inner test. */ + if (is_and) + { + if (integer_onep (t)) + return inner2; + else if (integer_zerop (t)) + return boolean_false_node; + } + + /* Handle the OR case, where we are redistributing: + (inner1 OR inner2) AND (op2a code2 op2b) + => (t OR (inner2 AND (op2a code2 op2b))) */ + else if (integer_onep (t)) + return boolean_true_node; + + /* Save partial result for later. */ + partial = t; + } + + /* Compute the second partial result, (inner2 AND (op2a code op2b)) */ + if (TREE_CODE (inner2) == SSA_NAME + && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2)) + && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison + && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s), + gimple_assign_rhs1 (s), + gimple_assign_rhs2 (s), + code2, op2a, op2b))) + { + /* Handle the AND case, where we are reassociating: + (inner1 AND inner2) AND (op2a code2 op2b) + => (inner1 AND t) */ + if (is_and) + { + if (integer_onep (t)) + return inner1; + else if (integer_zerop (t)) + return boolean_false_node; + /* If both are the same, we can apply the identity + (x AND x) == x. */ + else if (partial && same_bool_result_p (t, partial)) + return t; + } + + /* Handle the OR case. where we are redistributing: + (inner1 OR inner2) AND (op2a code2 op2b) + => (t OR (inner1 AND (op2a code2 op2b))) + => (t OR partial) */ + else + { + if (integer_onep (t)) + return boolean_true_node; + else if (partial) + { + /* We already got a simplification for the other + operand to the redistributed OR expression. The + interesting case is when at least one is false. + Or, if both are the same, we can apply the identity + (x OR x) == x. 
*/ + if (integer_zerop (partial)) + return t; + else if (integer_zerop (t)) + return partial; + else if (same_bool_result_p (t, partial)) + return t; + } + } + } + } + return NULL_TREE; +} + +/* Try to simplify the AND of two comparisons defined by + (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively. + If this can be done without constructing an intermediate value, + return the resulting tree; otherwise NULL_TREE is returned. + This function is deliberately asymmetric as it recurses on SSA_DEFs + in the first comparison but not the second. */ + +static tree +and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, + enum tree_code code2, tree op2a, tree op2b) +{ + /* First check for ((x CODE1 y) AND (x CODE2 y)). */ + if (operand_equal_p (op1a, op2a, 0) + && operand_equal_p (op1b, op2b, 0)) + { + /* Result will be either NULL_TREE, or a combined comparison. */ + tree t = combine_comparisons (UNKNOWN_LOCATION, + TRUTH_ANDIF_EXPR, code1, code2, + boolean_type_node, op1a, op1b); + if (t) + return t; + } + + /* Likewise the swapped case of the above. */ + if (operand_equal_p (op1a, op2b, 0) + && operand_equal_p (op1b, op2a, 0)) + { + /* Result will be either NULL_TREE, or a combined comparison. */ + tree t = combine_comparisons (UNKNOWN_LOCATION, + TRUTH_ANDIF_EXPR, code1, + swap_tree_comparison (code2), + boolean_type_node, op1a, op1b); + if (t) + return t; + } + + /* If both comparisons are of the same value against constants, we might + be able to merge them. */ + if (operand_equal_p (op1a, op2a, 0) + && TREE_CODE (op1b) == INTEGER_CST + && TREE_CODE (op2b) == INTEGER_CST) + { + int cmp = tree_int_cst_compare (op1b, op2b); + + /* If we have (op1a == op1b), we should either be able to + return that or FALSE, depending on whether the constant op1b + also satisfies the other comparison against op2b. */ + if (code1 == EQ_EXPR) + { + bool done = true; + bool val; + switch (code2) + { + case EQ_EXPR: val = (cmp == 0); break; + case NE_EXPR: val = (cmp != 0); break; + case LT_EXPR: val = (cmp < 0); break; + case GT_EXPR: val = (cmp > 0); break; + case LE_EXPR: val = (cmp <= 0); break; + case GE_EXPR: val = (cmp >= 0); break; + default: done = false; + } + if (done) + { + if (val) + return fold_build2 (code1, boolean_type_node, op1a, op1b); + else + return boolean_false_node; + } + } + /* Likewise if the second comparison is an == comparison. */ + else if (code2 == EQ_EXPR) + { + bool done = true; + bool val; + switch (code1) + { + case EQ_EXPR: val = (cmp == 0); break; + case NE_EXPR: val = (cmp != 0); break; + case LT_EXPR: val = (cmp > 0); break; + case GT_EXPR: val = (cmp < 0); break; + case LE_EXPR: val = (cmp >= 0); break; + case GE_EXPR: val = (cmp <= 0); break; + default: done = false; + } + if (done) + { + if (val) + return fold_build2 (code2, boolean_type_node, op2a, op2b); + else + return boolean_false_node; + } + } + + /* Same business with inequality tests. 
*/ + else if (code1 == NE_EXPR) + { + bool val; + switch (code2) + { + case EQ_EXPR: val = (cmp != 0); break; + case NE_EXPR: val = (cmp == 0); break; + case LT_EXPR: val = (cmp >= 0); break; + case GT_EXPR: val = (cmp <= 0); break; + case LE_EXPR: val = (cmp > 0); break; + case GE_EXPR: val = (cmp < 0); break; + default: + val = false; + } + if (val) + return fold_build2 (code2, boolean_type_node, op2a, op2b); + } + else if (code2 == NE_EXPR) + { + bool val; + switch (code1) + { + case EQ_EXPR: val = (cmp == 0); break; + case NE_EXPR: val = (cmp != 0); break; + case LT_EXPR: val = (cmp <= 0); break; + case GT_EXPR: val = (cmp >= 0); break; + case LE_EXPR: val = (cmp < 0); break; + case GE_EXPR: val = (cmp > 0); break; + default: + val = false; + } + if (val) + return fold_build2 (code1, boolean_type_node, op1a, op1b); + } + + /* Chose the more restrictive of two < or <= comparisons. */ + else if ((code1 == LT_EXPR || code1 == LE_EXPR) + && (code2 == LT_EXPR || code2 == LE_EXPR)) + { + if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR)) + return fold_build2 (code1, boolean_type_node, op1a, op1b); + else + return fold_build2 (code2, boolean_type_node, op2a, op2b); + } + + /* Likewise chose the more restrictive of two > or >= comparisons. */ + else if ((code1 == GT_EXPR || code1 == GE_EXPR) + && (code2 == GT_EXPR || code2 == GE_EXPR)) + { + if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR)) + return fold_build2 (code1, boolean_type_node, op1a, op1b); + else + return fold_build2 (code2, boolean_type_node, op2a, op2b); + } + + /* Check for singleton ranges. */ + else if (cmp == 0 + && ((code1 == LE_EXPR && code2 == GE_EXPR) + || (code1 == GE_EXPR && code2 == LE_EXPR))) + return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b); + + /* Check for disjoint ranges. */ + else if (cmp <= 0 + && (code1 == LT_EXPR || code1 == LE_EXPR) + && (code2 == GT_EXPR || code2 == GE_EXPR)) + return boolean_false_node; + else if (cmp >= 0 + && (code1 == GT_EXPR || code1 == GE_EXPR) + && (code2 == LT_EXPR || code2 == LE_EXPR)) + return boolean_false_node; + } + + /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where + NAME's definition is a truth value. See if there are any simplifications + that can be done against the NAME's definition. */ + if (TREE_CODE (op1a) == SSA_NAME + && (code1 == NE_EXPR || code1 == EQ_EXPR) + && (integer_zerop (op1b) || integer_onep (op1b))) + { + bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b)) + || (code1 == NE_EXPR && integer_onep (op1b))); + gimple stmt = SSA_NAME_DEF_STMT (op1a); + switch (gimple_code (stmt)) + { + case GIMPLE_ASSIGN: + /* Try to simplify by copy-propagating the definition. */ + return and_var_with_comparison (op1a, invert, code2, op2a, op2b); + + case GIMPLE_PHI: + /* If every argument to the PHI produces the same result when + ANDed with the second comparison, we win. + Do not do this unless the type is bool since we need a bool + result here anyway. */ + if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE) + { + tree result = NULL_TREE; + unsigned i; + for (i = 0; i < gimple_phi_num_args (stmt); i++) + { + tree arg = gimple_phi_arg_def (stmt, i); + + /* If this PHI has itself as an argument, ignore it. + If all the other args produce the same result, + we're still OK. */ + if (arg == gimple_phi_result (stmt)) + continue; + else if (TREE_CODE (arg) == INTEGER_CST) + { + if (invert ? 
integer_nonzerop (arg) : integer_zerop (arg)) + { + if (!result) + result = boolean_false_node; + else if (!integer_zerop (result)) + return NULL_TREE; + } + else if (!result) + result = fold_build2 (code2, boolean_type_node, + op2a, op2b); + else if (!same_bool_comparison_p (result, + code2, op2a, op2b)) + return NULL_TREE; + } + else if (TREE_CODE (arg) == SSA_NAME + && !SSA_NAME_IS_DEFAULT_DEF (arg)) + { + tree temp; + gimple def_stmt = SSA_NAME_DEF_STMT (arg); + /* In simple cases we can look through PHI nodes, + but we have to be careful with loops. + See PR49073. */ + if (! dom_info_available_p (CDI_DOMINATORS) + || gimple_bb (def_stmt) == gimple_bb (stmt) + || dominated_by_p (CDI_DOMINATORS, + gimple_bb (def_stmt), + gimple_bb (stmt))) + return NULL_TREE; + temp = and_var_with_comparison (arg, invert, code2, + op2a, op2b); + if (!temp) + return NULL_TREE; + else if (!result) + result = temp; + else if (!same_bool_result_p (result, temp)) + return NULL_TREE; + } + else + return NULL_TREE; + } + return result; + } + + default: + break; + } + } + return NULL_TREE; +} + +/* Try to simplify the AND of two comparisons, specified by + (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively. + If this can be simplified to a single expression (without requiring + introducing more SSA variables to hold intermediate values), + return the resulting tree. Otherwise return NULL_TREE. + If the result expression is non-null, it has boolean type. */ + +tree +maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b, + enum tree_code code2, tree op2a, tree op2b) +{ + tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b); + if (t) + return t; + else + return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b); +} + +/* Helper function for or_comparisons_1: try to simplify the OR of the + ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B). + If INVERT is true, invert the value of VAR before doing the OR. + Return NULL_EXPR if we can't simplify this to a single expression. */ + +static tree +or_var_with_comparison (tree var, bool invert, + enum tree_code code2, tree op2a, tree op2b) +{ + tree t; + gimple stmt = SSA_NAME_DEF_STMT (var); + + /* We can only deal with variables whose definitions are assignments. */ + if (!is_gimple_assign (stmt)) + return NULL_TREE; + + /* If we have an inverted comparison, apply DeMorgan's law and rewrite + !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b)) + Then we only have to consider the simpler non-inverted cases. */ + if (invert) + t = and_var_with_comparison_1 (stmt, + invert_tree_comparison (code2, false), + op2a, op2b); + else + t = or_var_with_comparison_1 (stmt, code2, op2a, op2b); + return canonicalize_bool (t, invert); +} + +/* Try to simplify the OR of the ssa variable defined by the assignment + STMT with the comparison specified by (OP2A CODE2 OP2B). + Return NULL_EXPR if we can't simplify this to a single expression. */ + +static tree +or_var_with_comparison_1 (gimple stmt, + enum tree_code code2, tree op2a, tree op2b) +{ + tree var = gimple_assign_lhs (stmt); + tree true_test_var = NULL_TREE; + tree false_test_var = NULL_TREE; + enum tree_code innercode = gimple_assign_rhs_code (stmt); + + /* Check for identities like (var OR (var != 0)) => true . 
*/ + if (TREE_CODE (op2a) == SSA_NAME + && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE) + { + if ((code2 == NE_EXPR && integer_zerop (op2b)) + || (code2 == EQ_EXPR && integer_nonzerop (op2b))) + { + true_test_var = op2a; + if (var == true_test_var) + return var; + } + else if ((code2 == EQ_EXPR && integer_zerop (op2b)) + || (code2 == NE_EXPR && integer_nonzerop (op2b))) + { + false_test_var = op2a; + if (var == false_test_var) + return boolean_true_node; + } + } + + /* If the definition is a comparison, recurse on it. */ + if (TREE_CODE_CLASS (innercode) == tcc_comparison) + { + tree t = or_comparisons_1 (innercode, + gimple_assign_rhs1 (stmt), + gimple_assign_rhs2 (stmt), + code2, + op2a, + op2b); + if (t) + return t; + } + + /* If the definition is an AND or OR expression, we may be able to + simplify by reassociating. */ + if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE + && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)) + { + tree inner1 = gimple_assign_rhs1 (stmt); + tree inner2 = gimple_assign_rhs2 (stmt); + gimple s; + tree t; + tree partial = NULL_TREE; + bool is_or = (innercode == BIT_IOR_EXPR); + + /* Check for boolean identities that don't require recursive examination + of inner1/inner2: + inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var + inner1 OR (inner1 AND inner2) => inner1 + !inner1 OR (inner1 OR inner2) => true + !inner1 OR (inner1 AND inner2) => !inner1 OR inner2 + */ + if (inner1 == true_test_var) + return (is_or ? var : inner1); + else if (inner2 == true_test_var) + return (is_or ? var : inner2); + else if (inner1 == false_test_var) + return (is_or + ? boolean_true_node + : or_var_with_comparison (inner2, false, code2, op2a, op2b)); + else if (inner2 == false_test_var) + return (is_or + ? boolean_true_node + : or_var_with_comparison (inner1, false, code2, op2a, op2b)); + + /* Next, redistribute/reassociate the OR across the inner tests. + Compute the first partial result, (inner1 OR (op2a code op2b)) */ + if (TREE_CODE (inner1) == SSA_NAME + && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1)) + && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison + && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s), + gimple_assign_rhs1 (s), + gimple_assign_rhs2 (s), + code2, op2a, op2b))) + { + /* Handle the OR case, where we are reassociating: + (inner1 OR inner2) OR (op2a code2 op2b) + => (t OR inner2) + If the partial result t is a constant, we win. Otherwise + continue on to try reassociating with the other inner test. */ + if (is_or) + { + if (integer_onep (t)) + return boolean_true_node; + else if (integer_zerop (t)) + return inner2; + } + + /* Handle the AND case, where we are redistributing: + (inner1 AND inner2) OR (op2a code2 op2b) + => (t AND (inner2 OR (op2a code op2b))) */ + else if (integer_zerop (t)) + return boolean_false_node; + + /* Save partial result for later. 
*/ + partial = t; + } + + /* Compute the second partial result, (inner2 OR (op2a code op2b)) */ + if (TREE_CODE (inner2) == SSA_NAME + && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2)) + && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison + && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s), + gimple_assign_rhs1 (s), + gimple_assign_rhs2 (s), + code2, op2a, op2b))) + { + /* Handle the OR case, where we are reassociating: + (inner1 OR inner2) OR (op2a code2 op2b) + => (inner1 OR t) + => (t OR partial) */ + if (is_or) + { + if (integer_zerop (t)) + return inner1; + else if (integer_onep (t)) + return boolean_true_node; + /* If both are the same, we can apply the identity + (x OR x) == x. */ + else if (partial && same_bool_result_p (t, partial)) + return t; + } + + /* Handle the AND case, where we are redistributing: + (inner1 AND inner2) OR (op2a code2 op2b) + => (t AND (inner1 OR (op2a code2 op2b))) + => (t AND partial) */ + else + { + if (integer_zerop (t)) + return boolean_false_node; + else if (partial) + { + /* We already got a simplification for the other + operand to the redistributed AND expression. The + interesting case is when at least one is true. + Or, if both are the same, we can apply the identity + (x AND x) == x. */ + if (integer_onep (partial)) + return t; + else if (integer_onep (t)) + return partial; + else if (same_bool_result_p (t, partial)) + return t; + } + } + } + } + return NULL_TREE; +} + +/* Try to simplify the OR of two comparisons defined by + (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively. + If this can be done without constructing an intermediate value, + return the resulting tree; otherwise NULL_TREE is returned. + This function is deliberately asymmetric as it recurses on SSA_DEFs + in the first comparison but not the second. */ + +static tree +or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b, + enum tree_code code2, tree op2a, tree op2b) +{ + /* First check for ((x CODE1 y) OR (x CODE2 y)). */ + if (operand_equal_p (op1a, op2a, 0) + && operand_equal_p (op1b, op2b, 0)) + { + /* Result will be either NULL_TREE, or a combined comparison. */ + tree t = combine_comparisons (UNKNOWN_LOCATION, + TRUTH_ORIF_EXPR, code1, code2, + boolean_type_node, op1a, op1b); + if (t) + return t; + } + + /* Likewise the swapped case of the above. */ + if (operand_equal_p (op1a, op2b, 0) + && operand_equal_p (op1b, op2a, 0)) + { + /* Result will be either NULL_TREE, or a combined comparison. */ + tree t = combine_comparisons (UNKNOWN_LOCATION, + TRUTH_ORIF_EXPR, code1, + swap_tree_comparison (code2), + boolean_type_node, op1a, op1b); + if (t) + return t; + } + + /* If both comparisons are of the same value against constants, we might + be able to merge them. */ + if (operand_equal_p (op1a, op2a, 0) + && TREE_CODE (op1b) == INTEGER_CST + && TREE_CODE (op2b) == INTEGER_CST) + { + int cmp = tree_int_cst_compare (op1b, op2b); + + /* If we have (op1a != op1b), we should either be able to + return that or TRUE, depending on whether the constant op1b + also satisfies the other comparison against op2b. 
*/ + if (code1 == NE_EXPR) + { + bool done = true; + bool val; + switch (code2) + { + case EQ_EXPR: val = (cmp == 0); break; + case NE_EXPR: val = (cmp != 0); break; + case LT_EXPR: val = (cmp < 0); break; + case GT_EXPR: val = (cmp > 0); break; + case LE_EXPR: val = (cmp <= 0); break; + case GE_EXPR: val = (cmp >= 0); break; + default: done = false; + } + if (done) + { + if (val) + return boolean_true_node; + else + return fold_build2 (code1, boolean_type_node, op1a, op1b); + } + } + /* Likewise if the second comparison is a != comparison. */ + else if (code2 == NE_EXPR) + { + bool done = true; + bool val; + switch (code1) + { + case EQ_EXPR: val = (cmp == 0); break; + case NE_EXPR: val = (cmp != 0); break; + case LT_EXPR: val = (cmp > 0); break; + case GT_EXPR: val = (cmp < 0); break; + case LE_EXPR: val = (cmp >= 0); break; + case GE_EXPR: val = (cmp <= 0); break; + default: done = false; + } + if (done) + { + if (val) + return boolean_true_node; + else + return fold_build2 (code2, boolean_type_node, op2a, op2b); + } + } + + /* See if an equality test is redundant with the other comparison. */ + else if (code1 == EQ_EXPR) + { + bool val; + switch (code2) + { + case EQ_EXPR: val = (cmp == 0); break; + case NE_EXPR: val = (cmp != 0); break; + case LT_EXPR: val = (cmp < 0); break; + case GT_EXPR: val = (cmp > 0); break; + case LE_EXPR: val = (cmp <= 0); break; + case GE_EXPR: val = (cmp >= 0); break; + default: + val = false; + } + if (val) + return fold_build2 (code2, boolean_type_node, op2a, op2b); + } + else if (code2 == EQ_EXPR) + { + bool val; + switch (code1) + { + case EQ_EXPR: val = (cmp == 0); break; + case NE_EXPR: val = (cmp != 0); break; + case LT_EXPR: val = (cmp > 0); break; + case GT_EXPR: val = (cmp < 0); break; + case LE_EXPR: val = (cmp >= 0); break; + case GE_EXPR: val = (cmp <= 0); break; + default: + val = false; + } + if (val) + return fold_build2 (code1, boolean_type_node, op1a, op1b); + } + + /* Chose the less restrictive of two < or <= comparisons. */ + else if ((code1 == LT_EXPR || code1 == LE_EXPR) + && (code2 == LT_EXPR || code2 == LE_EXPR)) + { + if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR)) + return fold_build2 (code2, boolean_type_node, op2a, op2b); + else + return fold_build2 (code1, boolean_type_node, op1a, op1b); + } + + /* Likewise chose the less restrictive of two > or >= comparisons. */ + else if ((code1 == GT_EXPR || code1 == GE_EXPR) + && (code2 == GT_EXPR || code2 == GE_EXPR)) + { + if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR)) + return fold_build2 (code2, boolean_type_node, op2a, op2b); + else + return fold_build2 (code1, boolean_type_node, op1a, op1b); + } + + /* Check for singleton ranges. */ + else if (cmp == 0 + && ((code1 == LT_EXPR && code2 == GT_EXPR) + || (code1 == GT_EXPR && code2 == LT_EXPR))) + return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b); + + /* Check for less/greater pairs that don't restrict the range at all. */ + else if (cmp >= 0 + && (code1 == LT_EXPR || code1 == LE_EXPR) + && (code2 == GT_EXPR || code2 == GE_EXPR)) + return boolean_true_node; + else if (cmp <= 0 + && (code1 == GT_EXPR || code1 == GE_EXPR) + && (code2 == LT_EXPR || code2 == LE_EXPR)) + return boolean_true_node; + } + + /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where + NAME's definition is a truth value. See if there are any simplifications + that can be done against the NAME's definition. 
*/ + if (TREE_CODE (op1a) == SSA_NAME + && (code1 == NE_EXPR || code1 == EQ_EXPR) + && (integer_zerop (op1b) || integer_onep (op1b))) + { + bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b)) + || (code1 == NE_EXPR && integer_onep (op1b))); + gimple stmt = SSA_NAME_DEF_STMT (op1a); + switch (gimple_code (stmt)) + { + case GIMPLE_ASSIGN: + /* Try to simplify by copy-propagating the definition. */ + return or_var_with_comparison (op1a, invert, code2, op2a, op2b); + + case GIMPLE_PHI: + /* If every argument to the PHI produces the same result when + ORed with the second comparison, we win. + Do not do this unless the type is bool since we need a bool + result here anyway. */ + if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE) + { + tree result = NULL_TREE; + unsigned i; + for (i = 0; i < gimple_phi_num_args (stmt); i++) + { + tree arg = gimple_phi_arg_def (stmt, i); + + /* If this PHI has itself as an argument, ignore it. + If all the other args produce the same result, + we're still OK. */ + if (arg == gimple_phi_result (stmt)) + continue; + else if (TREE_CODE (arg) == INTEGER_CST) + { + if (invert ? integer_zerop (arg) : integer_nonzerop (arg)) + { + if (!result) + result = boolean_true_node; + else if (!integer_onep (result)) + return NULL_TREE; + } + else if (!result) + result = fold_build2 (code2, boolean_type_node, + op2a, op2b); + else if (!same_bool_comparison_p (result, + code2, op2a, op2b)) + return NULL_TREE; + } + else if (TREE_CODE (arg) == SSA_NAME + && !SSA_NAME_IS_DEFAULT_DEF (arg)) + { + tree temp; + gimple def_stmt = SSA_NAME_DEF_STMT (arg); + /* In simple cases we can look through PHI nodes, + but we have to be careful with loops. + See PR49073. */ + if (! dom_info_available_p (CDI_DOMINATORS) + || gimple_bb (def_stmt) == gimple_bb (stmt) + || dominated_by_p (CDI_DOMINATORS, + gimple_bb (def_stmt), + gimple_bb (stmt))) + return NULL_TREE; + temp = or_var_with_comparison (arg, invert, code2, + op2a, op2b); + if (!temp) + return NULL_TREE; + else if (!result) + result = temp; + else if (!same_bool_result_p (result, temp)) + return NULL_TREE; + } + else + return NULL_TREE; + } + return result; + } + + default: + break; + } + } + return NULL_TREE; +} + +/* Try to simplify the OR of two comparisons, specified by + (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively. + If this can be simplified to a single expression (without requiring + introducing more SSA variables to hold intermediate values), + return the resulting tree. Otherwise return NULL_TREE. + If the result expression is non-null, it has boolean type. */ + +tree +maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b, + enum tree_code code2, tree op2a, tree op2b) +{ + tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b); + if (t) + return t; + else + return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b); +} + + +/* Fold STMT to a constant using VALUEIZE to valueize SSA names. + + Either NULL_TREE, a simplified but non-constant or a constant + is returned. + + ??? This should go into a gimple-fold-inline.h file to be eventually + privatized with the single valueize function used in the various TUs + to avoid the indirect function call overhead. 
*/ + +tree +gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree)) +{ + location_t loc = gimple_location (stmt); + switch (gimple_code (stmt)) + { + case GIMPLE_ASSIGN: + { + enum tree_code subcode = gimple_assign_rhs_code (stmt); + + switch (get_gimple_rhs_class (subcode)) + { + case GIMPLE_SINGLE_RHS: + { + tree rhs = gimple_assign_rhs1 (stmt); + enum tree_code_class kind = TREE_CODE_CLASS (subcode); + + if (TREE_CODE (rhs) == SSA_NAME) + { + /* If the RHS is an SSA_NAME, return its known constant value, + if any. */ + return (*valueize) (rhs); + } + /* Handle propagating invariant addresses into address + operations. */ + else if (TREE_CODE (rhs) == ADDR_EXPR + && !is_gimple_min_invariant (rhs)) + { + HOST_WIDE_INT offset; + tree base; + base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0), + &offset, + valueize); + if (base + && (CONSTANT_CLASS_P (base) + || decl_address_invariant_p (base))) + return build_invariant_address (TREE_TYPE (rhs), + base, offset); + } + else if (TREE_CODE (rhs) == CONSTRUCTOR + && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE + && (CONSTRUCTOR_NELTS (rhs) + == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)))) + { + unsigned i; + tree val, list; + + list = NULL_TREE; + FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val) + { + val = (*valueize) (val); + if (TREE_CODE (val) == INTEGER_CST + || TREE_CODE (val) == REAL_CST + || TREE_CODE (val) == FIXED_CST) + list = tree_cons (NULL_TREE, val, list); + else + return NULL_TREE; + } + + return build_vector (TREE_TYPE (rhs), nreverse (list)); + } + + if (kind == tcc_reference) + { + if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR + || TREE_CODE (rhs) == REALPART_EXPR + || TREE_CODE (rhs) == IMAGPART_EXPR) + && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) + { + tree val = (*valueize) (TREE_OPERAND (rhs, 0)); + return fold_unary_loc (EXPR_LOCATION (rhs), + TREE_CODE (rhs), + TREE_TYPE (rhs), val); + } + else if (TREE_CODE (rhs) == BIT_FIELD_REF + && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) + { + tree val = (*valueize) (TREE_OPERAND (rhs, 0)); + return fold_ternary_loc (EXPR_LOCATION (rhs), + TREE_CODE (rhs), + TREE_TYPE (rhs), val, + TREE_OPERAND (rhs, 1), + TREE_OPERAND (rhs, 2)); + } + else if (TREE_CODE (rhs) == MEM_REF + && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME) + { + tree val = (*valueize) (TREE_OPERAND (rhs, 0)); + if (TREE_CODE (val) == ADDR_EXPR + && is_gimple_min_invariant (val)) + { + tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs), + unshare_expr (val), + TREE_OPERAND (rhs, 1)); + if (tem) + rhs = tem; + } + } + return fold_const_aggregate_ref_1 (rhs, valueize); + } + else if (kind == tcc_declaration) + return get_symbol_constant_value (rhs); + return rhs; + } + + case GIMPLE_UNARY_RHS: + { + /* Handle unary operators that can appear in GIMPLE form. + Note that we know the single operand must be a constant, + so this should almost always return a simplified RHS. */ + tree lhs = gimple_assign_lhs (stmt); + tree op0 = (*valueize) (gimple_assign_rhs1 (stmt)); + + /* Conversions are useless for CCP purposes if they are + value-preserving. Thus the restrictions that + useless_type_conversion_p places for restrict qualification + of pointer types should not apply here. + Substitution later will only substitute to allowed places. 
*/ + if (CONVERT_EXPR_CODE_P (subcode) + && POINTER_TYPE_P (TREE_TYPE (lhs)) + && POINTER_TYPE_P (TREE_TYPE (op0)) + && TYPE_ADDR_SPACE (TREE_TYPE (lhs)) + == TYPE_ADDR_SPACE (TREE_TYPE (op0)) + && TYPE_MODE (TREE_TYPE (lhs)) + == TYPE_MODE (TREE_TYPE (op0))) + return op0; + + return + fold_unary_ignore_overflow_loc (loc, subcode, + gimple_expr_type (stmt), op0); + } + + case GIMPLE_BINARY_RHS: + { + /* Handle binary operators that can appear in GIMPLE form. */ + tree op0 = (*valueize) (gimple_assign_rhs1 (stmt)); + tree op1 = (*valueize) (gimple_assign_rhs2 (stmt)); + + /* Translate &x + CST into an invariant form suitable for + further propagation. */ + if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR + && TREE_CODE (op0) == ADDR_EXPR + && TREE_CODE (op1) == INTEGER_CST) + { + tree off = fold_convert (ptr_type_node, op1); + return build_fold_addr_expr_loc + (loc, + fold_build2 (MEM_REF, + TREE_TYPE (TREE_TYPE (op0)), + unshare_expr (op0), off)); + } + + return fold_binary_loc (loc, subcode, + gimple_expr_type (stmt), op0, op1); + } + + case GIMPLE_TERNARY_RHS: + { + /* Handle ternary operators that can appear in GIMPLE form. */ + tree op0 = (*valueize) (gimple_assign_rhs1 (stmt)); + tree op1 = (*valueize) (gimple_assign_rhs2 (stmt)); + tree op2 = (*valueize) (gimple_assign_rhs3 (stmt)); + + /* Fold embedded expressions in ternary codes. */ + if ((subcode == COND_EXPR + || subcode == VEC_COND_EXPR) + && COMPARISON_CLASS_P (op0)) + { + tree op00 = (*valueize) (TREE_OPERAND (op0, 0)); + tree op01 = (*valueize) (TREE_OPERAND (op0, 1)); + tree tem = fold_binary_loc (loc, TREE_CODE (op0), + TREE_TYPE (op0), op00, op01); + if (tem) + op0 = tem; + } + + return fold_ternary_loc (loc, subcode, + gimple_expr_type (stmt), op0, op1, op2); + } + + default: + gcc_unreachable (); + } + } + + case GIMPLE_CALL: + { + tree fn; + + if (gimple_call_internal_p (stmt)) + /* No folding yet for these functions. */ + return NULL_TREE; + + fn = (*valueize) (gimple_call_fn (stmt)); + if (TREE_CODE (fn) == ADDR_EXPR + && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL + && DECL_BUILT_IN (TREE_OPERAND (fn, 0))) + { + tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt)); + tree call, retval; + unsigned i; + for (i = 0; i < gimple_call_num_args (stmt); ++i) + args[i] = (*valueize) (gimple_call_arg (stmt, i)); + call = build_call_array_loc (loc, + gimple_call_return_type (stmt), + fn, gimple_call_num_args (stmt), args); + retval = fold_call_expr (EXPR_LOCATION (call), call, false); + if (retval) + /* fold_call_expr wraps the result inside a NOP_EXPR. */ + STRIP_NOPS (retval); + return retval; + } + return NULL_TREE; + } + + default: + return NULL_TREE; + } +} + +/* Fold STMT to a constant using VALUEIZE to valueize SSA names. + Returns NULL_TREE if folding to a constant is not possible, otherwise + returns a constant according to is_gimple_min_invariant. */ + +tree +gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree)) +{ + tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize); + if (res && is_gimple_min_invariant (res)) + return res; + return NULL_TREE; +} + + +/* The following set of functions are supposed to fold references using + their constant initializers. */ + +static tree fold_ctor_reference (tree type, tree ctor, + unsigned HOST_WIDE_INT offset, + unsigned HOST_WIDE_INT size); + +/* See if we can find constructor defining value of BASE. 
+ When we know the consructor with constant offset (such as + base is array[40] and we do know constructor of array), then + BIT_OFFSET is adjusted accordingly. + + As a special case, return error_mark_node when constructor + is not explicitly available, but it is known to be zero + such as 'static const int a;'. */ +static tree +get_base_constructor (tree base, HOST_WIDE_INT *bit_offset, + tree (*valueize)(tree)) +{ + HOST_WIDE_INT bit_offset2, size, max_size; + if (TREE_CODE (base) == MEM_REF) + { + if (!integer_zerop (TREE_OPERAND (base, 1))) + { + if (!host_integerp (TREE_OPERAND (base, 1), 0)) + return NULL_TREE; + *bit_offset += (mem_ref_offset (base).low + * BITS_PER_UNIT); + } + + if (valueize + && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME) + base = valueize (TREE_OPERAND (base, 0)); + if (!base || TREE_CODE (base) != ADDR_EXPR) + return NULL_TREE; + base = TREE_OPERAND (base, 0); + } + + /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its + DECL_INITIAL. If BASE is a nested reference into another + ARRAY_REF or COMPONENT_REF, make a recursive call to resolve + the inner reference. */ + switch (TREE_CODE (base)) + { + case VAR_DECL: + if (!const_value_known_p (base)) + return NULL_TREE; + + /* Fallthru. */ + case CONST_DECL: + if (!DECL_INITIAL (base) + && (TREE_STATIC (base) || DECL_EXTERNAL (base))) + return error_mark_node; + /* Do not return an error_mark_node DECL_INITIAL. LTO uses this + as special marker (_not_ zero ...) for its own purposes. */ + if (DECL_INITIAL (base) == error_mark_node) + return NULL_TREE; + return DECL_INITIAL (base); + + case ARRAY_REF: + case COMPONENT_REF: + base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size); + if (max_size == -1 || size != max_size) + return NULL_TREE; + *bit_offset += bit_offset2; + return get_base_constructor (base, bit_offset, valueize); + + case STRING_CST: + case CONSTRUCTOR: + return base; + + default: + return NULL_TREE; + } +} + +/* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE + to the memory at bit OFFSET. + + We do only simple job of folding byte accesses. */ + +static tree +fold_string_cst_ctor_reference (tree type, tree ctor, + unsigned HOST_WIDE_INT offset, + unsigned HOST_WIDE_INT size) +{ + if (INTEGRAL_TYPE_P (type) + && (TYPE_MODE (type) + == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) + && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) + == MODE_INT) + && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1 + && size == BITS_PER_UNIT + && !(offset % BITS_PER_UNIT)) + { + offset /= BITS_PER_UNIT; + if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor)) + return build_int_cst_type (type, (TREE_STRING_POINTER (ctor) + [offset])); + /* Folding + const char a[20]="hello"; + return a[10]; + + might lead to offset greater than string length. In this case we + know value is either initialized to 0 or out of bounds. Return 0 + in both cases. */ + return build_zero_cst (type); + } + return NULL_TREE; +} + +/* CTOR is CONSTRUCTOR of an array type. Fold reference of type TYPE and size + SIZE to the memory at bit OFFSET. */ + +static tree +fold_array_ctor_reference (tree type, tree ctor, + unsigned HOST_WIDE_INT offset, + unsigned HOST_WIDE_INT size) +{ + unsigned HOST_WIDE_INT cnt; + tree cfield, cval; + double_int low_bound, elt_size; + double_int index, max_index; + double_int access_index; + tree domain_type = NULL_TREE; + HOST_WIDE_INT inner_offset; + + /* Compute low bound and elt size. 
*/ + if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE) + domain_type = TYPE_DOMAIN (TREE_TYPE (ctor)); + if (domain_type && TYPE_MIN_VALUE (domain_type)) + { + /* Static constructors for variably sized objects makes no sense. */ + gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST); + low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type)); + } + else + low_bound = double_int_zero; + /* Static constructors for variably sized objects makes no sense. */ + gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) + == INTEGER_CST); + elt_size = + tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))); + + + /* We can handle only constantly sized accesses that are known to not + be larger than size of array element. */ + if (!TYPE_SIZE_UNIT (type) + || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST + || double_int_cmp (elt_size, + tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0) + return NULL_TREE; + + /* Compute the array index we look for. */ + access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT), + elt_size, TRUNC_DIV_EXPR); + access_index = double_int_add (access_index, low_bound); + + /* And offset within the access. */ + inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT); + + /* See if the array field is large enough to span whole access. We do not + care to fold accesses spanning multiple array indexes. */ + if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT) + return NULL_TREE; + + index = double_int_sub (low_bound, double_int_one); + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval) + { + /* Array constructor might explicitely set index, or specify range + or leave index NULL meaning that it is next index after previous + one. */ + if (cfield) + { + if (TREE_CODE (cfield) == INTEGER_CST) + max_index = index = tree_to_double_int (cfield); + else + { + gcc_assert (TREE_CODE (cfield) == RANGE_EXPR); + index = tree_to_double_int (TREE_OPERAND (cfield, 0)); + max_index = tree_to_double_int (TREE_OPERAND (cfield, 1)); + } + } + else + max_index = index = double_int_add (index, double_int_one); + + /* Do we have match? */ + if (double_int_cmp (access_index, index, 1) >= 0 + && double_int_cmp (access_index, max_index, 1) <= 0) + return fold_ctor_reference (type, cval, inner_offset, size); + } + /* When memory is not explicitely mentioned in constructor, + it is 0 (or out of range). */ + return build_zero_cst (type); +} + +/* CTOR is CONSTRUCTOR of an aggregate or vector. + Fold reference of type TYPE and size SIZE to the memory at bit OFFSET. */ + +static tree +fold_nonarray_ctor_reference (tree type, tree ctor, + unsigned HOST_WIDE_INT offset, + unsigned HOST_WIDE_INT size) +{ + unsigned HOST_WIDE_INT cnt; + tree cfield, cval; + + FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, + cval) + { + tree byte_offset = DECL_FIELD_OFFSET (cfield); + tree field_offset = DECL_FIELD_BIT_OFFSET (cfield); + tree field_size = DECL_SIZE (cfield); + double_int bitoffset; + double_int byte_offset_cst = tree_to_double_int (byte_offset); + double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT); + double_int bitoffset_end, access_end; + + /* Variable sized objects in static constructors makes no sense, + but field_size can be NULL for flexible array members. */ + gcc_assert (TREE_CODE (field_offset) == INTEGER_CST + && TREE_CODE (byte_offset) == INTEGER_CST + && (field_size != NULL_TREE + ? 
TREE_CODE (field_size) == INTEGER_CST + : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE)); + + /* Compute bit offset of the field. */ + bitoffset = double_int_add (tree_to_double_int (field_offset), + double_int_mul (byte_offset_cst, + bits_per_unit_cst)); + /* Compute bit offset where the field ends. */ + if (field_size != NULL_TREE) + bitoffset_end = double_int_add (bitoffset, + tree_to_double_int (field_size)); + else + bitoffset_end = double_int_zero; + + access_end = double_int_add (uhwi_to_double_int (offset), + uhwi_to_double_int (size)); + + /* Is there any overlap between [OFFSET, OFFSET+SIZE) and + [BITOFFSET, BITOFFSET_END)? */ + if (double_int_cmp (access_end, bitoffset, 0) > 0 + && (field_size == NULL_TREE + || double_int_cmp (uhwi_to_double_int (offset), + bitoffset_end, 0) < 0)) + { + double_int inner_offset = double_int_sub (uhwi_to_double_int (offset), + bitoffset); + /* We do have overlap. Now see if field is large enough to + cover the access. Give up for accesses spanning multiple + fields. */ + if (double_int_cmp (access_end, bitoffset_end, 0) > 0) + return NULL_TREE; + if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0) + return NULL_TREE; + return fold_ctor_reference (type, cval, + double_int_to_uhwi (inner_offset), size); + } + } + /* When memory is not explicitely mentioned in constructor, it is 0. */ + return build_zero_cst (type); +} + +/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE + to the memory at bit OFFSET. */ + +static tree +fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset, + unsigned HOST_WIDE_INT size) +{ + tree ret; + + /* We found the field with exact match. */ + if (useless_type_conversion_p (type, TREE_TYPE (ctor)) + && !offset) + return canonicalize_constructor_val (ctor); + + /* We are at the end of walk, see if we can view convert the + result. */ + if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset + /* VIEW_CONVERT_EXPR is defined only for matching sizes. */ + && operand_equal_p (TYPE_SIZE (type), + TYPE_SIZE (TREE_TYPE (ctor)), 0)) + { + ret = canonicalize_constructor_val (ctor); + ret = fold_unary (VIEW_CONVERT_EXPR, type, ret); + if (ret) + STRIP_NOPS (ret); + return ret; + } + if (TREE_CODE (ctor) == STRING_CST) + return fold_string_cst_ctor_reference (type, ctor, offset, size); + if (TREE_CODE (ctor) == CONSTRUCTOR) + { + + if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE + || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE) + return fold_array_ctor_reference (type, ctor, offset, size); + else + return fold_nonarray_ctor_reference (type, ctor, offset, size); + } + + return NULL_TREE; +} + +/* Return the tree representing the element referenced by T if T is an + ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA + names using VALUEIZE. Return NULL_TREE otherwise. */ + +tree +fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree)) +{ + tree ctor, idx, base; + HOST_WIDE_INT offset, size, max_size; + tree tem; + + if (TREE_THIS_VOLATILE (t)) + return NULL_TREE; + + if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration) + return get_symbol_constant_value (t); + + tem = fold_read_from_constant_string (t); + if (tem) + return tem; + + switch (TREE_CODE (t)) + { + case ARRAY_REF: + case ARRAY_RANGE_REF: + /* Constant indexes are handled well by get_base_constructor. + Only special case variable offsets. + FIXME: This code can't handle nested references with variable indexes + (they will be handled only by iteration of ccp). 
Perhaps we can bring + get_ref_base_and_extent here and make it use a valueize callback. */ + if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME + && valueize + && (idx = (*valueize) (TREE_OPERAND (t, 1))) + && host_integerp (idx, 0)) + { + tree low_bound, unit_size; + + /* If the resulting bit-offset is constant, track it. */ + if ((low_bound = array_ref_low_bound (t), + host_integerp (low_bound, 0)) + && (unit_size = array_ref_element_size (t), + host_integerp (unit_size, 1))) + { + offset = TREE_INT_CST_LOW (idx); + offset -= TREE_INT_CST_LOW (low_bound); + offset *= TREE_INT_CST_LOW (unit_size); + offset *= BITS_PER_UNIT; + + base = TREE_OPERAND (t, 0); + ctor = get_base_constructor (base, &offset, valueize); + /* Empty constructor. Always fold to 0. */ + if (ctor == error_mark_node) + return build_zero_cst (TREE_TYPE (t)); + /* Out of bound array access. Value is undefined, + but don't fold. */ + if (offset < 0) + return NULL_TREE; + /* We can not determine ctor. */ + if (!ctor) + return NULL_TREE; + return fold_ctor_reference (TREE_TYPE (t), ctor, offset, + TREE_INT_CST_LOW (unit_size) + * BITS_PER_UNIT); + } + } + /* Fallthru. */ + + case COMPONENT_REF: + case BIT_FIELD_REF: + case TARGET_MEM_REF: + case MEM_REF: + base = get_ref_base_and_extent (t, &offset, &size, &max_size); + ctor = get_base_constructor (base, &offset, valueize); + + /* Empty constructor. Always fold to 0. */ + if (ctor == error_mark_node) + return build_zero_cst (TREE_TYPE (t)); + /* We do not know precise address. */ + if (max_size == -1 || max_size != size) + return NULL_TREE; + /* We can not determine ctor. */ + if (!ctor) + return NULL_TREE; + + /* Out of bound array access. Value is undefined, but don't fold. */ + if (offset < 0) + return NULL_TREE; + + return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size); + + case REALPART_EXPR: + case IMAGPART_EXPR: + { + tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize); + if (c && TREE_CODE (c) == COMPLEX_CST) + return fold_build1_loc (EXPR_LOCATION (t), + TREE_CODE (t), TREE_TYPE (t), c); + break; + } + + default: + break; + } + + return NULL_TREE; +} + +tree +fold_const_aggregate_ref (tree t) +{ + return fold_const_aggregate_ref_1 (t, NULL); +} + +/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN + is integer form of OBJ_TYPE_REF_TOKEN of the reference expression. + KNOWN_BINFO carries the binfo describing the true type of + OBJ_TYPE_REF_OBJECT(REF). */ + +tree +gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo) +{ + unsigned HOST_WIDE_INT offset, size; + tree v, fn; + + v = BINFO_VTABLE (known_binfo); + /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. 
*/ + if (!v) + return NULL_TREE; + + if (TREE_CODE (v) == POINTER_PLUS_EXPR) + { + offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT; + v = TREE_OPERAND (v, 0); + } + else + offset = 0; + + if (TREE_CODE (v) != ADDR_EXPR) + return NULL_TREE; + v = TREE_OPERAND (v, 0); + + if (TREE_CODE (v) != VAR_DECL + || !DECL_VIRTUAL_P (v) + || !DECL_INITIAL (v) + || DECL_INITIAL (v) == error_mark_node) + return NULL_TREE; + gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE); + size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1); + offset += token * size; + fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v), + offset, size); + if (!fn || integer_zerop (fn)) + return NULL_TREE; + gcc_assert (TREE_CODE (fn) == ADDR_EXPR + || TREE_CODE (fn) == FDESC_EXPR); + fn = TREE_OPERAND (fn, 0); + gcc_assert (TREE_CODE (fn) == FUNCTION_DECL); + + /* When cgraph node is missing and function is not public, we cannot + devirtualize. This can happen in WHOPR when the actual method + ends up in other partition, because we found devirtualization + possibility too late. */ + if (!can_refer_decl_in_current_unit_p (fn)) + return NULL_TREE; + + return fn; +} + +/* Return true iff VAL is a gimple expression that is known to be + non-negative. Restricted to floating-point inputs. */ + +bool +gimple_val_nonnegative_real_p (tree val) +{ + gimple def_stmt; + + gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val))); + + /* Use existing logic for non-gimple trees. */ + if (tree_expr_nonnegative_p (val)) + return true; + + if (TREE_CODE (val) != SSA_NAME) + return false; + + /* Currently we look only at the immediately defining statement + to make this determination, since recursion on defining + statements of operands can lead to quadratic behavior in the + worst case. This is expected to catch almost all occurrences + in practice. It would be possible to implement limited-depth + recursion if important cases are lost. Alternatively, passes + that need this information (such as the pow/powi lowering code + in the cse_sincos pass) could be revised to provide it through + dataflow propagation. */ + + def_stmt = SSA_NAME_DEF_STMT (val); + + if (is_gimple_assign (def_stmt)) + { + tree op0, op1; + + /* See fold-const.c:tree_expr_nonnegative_p for additional + cases that could be handled with recursion. */ + + switch (gimple_assign_rhs_code (def_stmt)) + { + case ABS_EXPR: + /* Always true for floating-point operands. */ + return true; + + case MULT_EXPR: + /* True if the two operands are identical (since we are + restricted to floating-point inputs). */ + op0 = gimple_assign_rhs1 (def_stmt); + op1 = gimple_assign_rhs2 (def_stmt); + + if (op0 == op1 + || operand_equal_p (op0, op1, 0)) + return true; + + default: + return false; + } + } + else if (is_gimple_call (def_stmt)) + { + tree fndecl = gimple_call_fndecl (def_stmt); + if (fndecl + && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL) + { + tree arg1; + + switch (DECL_FUNCTION_CODE (fndecl)) + { + CASE_FLT_FN (BUILT_IN_ACOS): + CASE_FLT_FN (BUILT_IN_ACOSH): + CASE_FLT_FN (BUILT_IN_CABS): + CASE_FLT_FN (BUILT_IN_COSH): + CASE_FLT_FN (BUILT_IN_ERFC): + CASE_FLT_FN (BUILT_IN_EXP): + CASE_FLT_FN (BUILT_IN_EXP10): + CASE_FLT_FN (BUILT_IN_EXP2): + CASE_FLT_FN (BUILT_IN_FABS): + CASE_FLT_FN (BUILT_IN_FDIM): + CASE_FLT_FN (BUILT_IN_HYPOT): + CASE_FLT_FN (BUILT_IN_POW10): + return true; + + CASE_FLT_FN (BUILT_IN_SQRT): + /* sqrt(-0.0) is -0.0, and sqrt is not defined over other + nonnegative inputs. 
*/ + if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val)))) + return true; + + break; + + CASE_FLT_FN (BUILT_IN_POWI): + /* True if the second argument is an even integer. */ + arg1 = gimple_call_arg (def_stmt, 1); + + if (TREE_CODE (arg1) == INTEGER_CST + && (TREE_INT_CST_LOW (arg1) & 1) == 0) + return true; + + break; + + CASE_FLT_FN (BUILT_IN_POW): + /* True if the second argument is an even integer-valued + real. */ + arg1 = gimple_call_arg (def_stmt, 1); + + if (TREE_CODE (arg1) == REAL_CST) + { + REAL_VALUE_TYPE c; + HOST_WIDE_INT n; + + c = TREE_REAL_CST (arg1); + n = real_to_integer (&c); + + if ((n & 1) == 0) + { + REAL_VALUE_TYPE cint; + real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0); + if (real_identical (&c, &cint)) + return true; + } + } + + break; + + default: + return false; + } + } + } + + return false; +}
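Editorial note (not part of the patch): the identities that the new and_comparisons_1 applies when both comparisons test the same value against constants can be checked with a small, standalone C program. This is only an illustration of the source-level equivalences (more restrictive bound, singleton range, disjoint range); it does not use or model any GCC internals.

/* Standalone sketch: brute-force check of the comparison-folding
   identities exploited by and_comparisons_1 for constant bounds.  */
#include <assert.h>
#include <stdio.h>

int
main (void)
{
  for (int x = -20; x <= 20; x++)
    {
      /* (x < 5) AND (x < 10)  =>  x < 5 (keep the more restrictive bound).  */
      assert (((x < 5) && (x < 10)) == (x < 5));
      /* (x <= 3) AND (x >= 3)  =>  x == 3 (singleton range).  */
      assert (((x <= 3) && (x >= 3)) == (x == 3));
      /* (x < 2) AND (x > 5)  =>  false (disjoint ranges).  */
      assert (!((x < 2) && (x > 5)));
    }
  printf ("comparison-folding identities hold on [-20, 20]\n");
  return 0;
}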
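Editorial note: the string-constant case handled by fold_string_cst_ctor_reference corresponds to the following source-level behaviour, taken from the example in the patch's own comment. A read at a known offset inside the explicit initializer folds to that byte, and a read past "hello" but still inside the 20-byte object folds to zero. This is a plain C sketch of the semantics being folded, not GCC code.

/* Standalone sketch: what folding a read from a constant string
   initializer produces at the source level.  */
static const char a[20] = "hello";

int
main (void)
{
  /* a[1] is 'e'; a[10] is past the string but within the object,
     so its value is 0, matching the zero-fill rule the folder uses.  */
  return (a[1] == 'e' && a[10] == 0) ? 0 : 1;
}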
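Editorial note: gimple_val_nonnegative_real_p treats pow (x, c) as non-negative when c is an even integer-valued real. The sketch below mirrors that parity test with standard C library calls only; the helper name is hypothetical and the code is an illustration of the rule, not the GCC implementation (which works on REAL_VALUE_TYPE via real_to_integer and real_identical).

/* Standalone sketch: when is pow (x, exponent) known non-negative
   for every real x?  Exactly when the exponent is an even integer.  */
#include <math.h>
#include <stdbool.h>
#include <stdio.h>

/* Hypothetical helper mirroring the even-integer-exponent test.  */
static bool
pow_result_known_nonnegative (double exponent)
{
  double n = nearbyint (exponent);
  return n == exponent && fmod (n, 2.0) == 0.0;
}

int
main (void)
{
  printf ("pow(x, 4.0) nonnegative for all x? %d\n",
	  pow_result_known_nonnegative (4.0));	/* 1 */
  printf ("pow(x, 3.0) nonnegative for all x? %d\n",
	  pow_result_known_nonnegative (3.0));	/* 0 */
  printf ("pow(x, 2.5) nonnegative for all x? %d\n",
	  pow_result_known_nonnegative (2.5));	/* 0 */
  return 0;
}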