/* Statement simplification on GIMPLE.
- Copyright (C) 2010 Free Software Foundation, Inc.
+ Copyright (C) 2010, 2011 Free Software Foundation, Inc.
Split out from tree-ssa-ccp.c.
This file is part of GCC.
#include "tree-pass.h"
#include "tree-ssa-propagate.h"
#include "target.h"
+#include "gimple-fold.h"
+
+/* Return true when DECL can be referenced from the current unit.
+ We can get declarations that cannot be referenced for
+ various reasons:
+
+ 1) When analyzing C++ virtual tables.
+ C++ virtual tables do have known constructors even
+ when they are keyed to another compilation unit.
+ Those tables can contain pointers to methods and vars
+ in other units. Those methods have both STATIC and EXTERNAL
+ set.
+ 2) In WHOPR mode devirtualization might lead to a reference
+ to a method that was partitioned elsewhere.
+ In this case we have a static VAR_DECL or FUNCTION_DECL
+ that has no corresponding callgraph/varpool node
+ declaring the body.
+ 3) COMDAT functions referred to by external vtables that
+ we devirtualize only during the final compilation stage.
+ At this time we have already decided that we will not output
+ the function body and thus we can't reference the symbol
+ directly. */
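+
+/* A sketch of case 1, assuming the usual C++ key-function rule:
+
+ // a.h
+ struct A { virtual void f (); }; // f is defined in a.C, so the
+ // vtable for A is keyed to a.C
+
+ A unit that merely includes a.h can still analyze A's vtable
+ initializer and thus see a reference to A::f even though the method
+ body lives in the other unit. */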
-
-/* If SYM is a constant variable with known value, return the value.
- NULL_TREE is returned otherwise. */
-
-tree
-get_symbol_constant_value (tree sym)
-{
- if ((TREE_STATIC (sym) || DECL_EXTERNAL (sym))
- && (TREE_CODE (sym) == CONST_DECL
- || varpool_get_node (sym)->const_value_known))
- {
- tree val = DECL_INITIAL (sym);
- if (val)
- {
- STRIP_NOPS (val);
- if (is_gimple_min_invariant (val))
- {
- if (TREE_CODE (val) == ADDR_EXPR)
- {
- tree base = get_base_address (TREE_OPERAND (val, 0));
- if (base && TREE_CODE (base) == VAR_DECL)
- {
- TREE_ADDRESSABLE (base) = 1;
- if (gimple_referenced_vars (cfun))
- add_referenced_var (base);
- }
- }
- return val;
- }
- }
- /* Variables declared 'const' without an initializer
- have zero as the initializer if they may not be
- overridden at link or run time. */
- if (!val
- && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
- || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
- return fold_convert (TREE_TYPE (sym), integer_zero_node);
- }
-
- return NULL_TREE;
-}
-
-
-/* Return true if we may propagate the address expression ADDR into the
- dereference DEREF and cancel them. */
-
-bool
-may_propagate_address_into_dereference (tree addr, tree deref)
-{
- gcc_assert (TREE_CODE (deref) == MEM_REF
- && TREE_CODE (addr) == ADDR_EXPR);
-
- /* Don't propagate if ADDR's operand has incomplete type. */
- if (!COMPLETE_TYPE_P (TREE_TYPE (TREE_OPERAND (addr, 0))))
- return false;
-
- /* If the address is invariant then we do not need to preserve restrict
- qualifications. But we do need to preserve volatile qualifiers until
- we can annotate the folded dereference itself properly. */
- if (is_gimple_min_invariant (addr)
- && (!TREE_THIS_VOLATILE (deref)
- || TYPE_VOLATILE (TREE_TYPE (addr))))
- return useless_type_conversion_p (TREE_TYPE (deref),
- TREE_TYPE (TREE_OPERAND (addr, 0)));
-
- /* Else both the address substitution and the folding must result in
- a valid useless type conversion sequence. */
- return (useless_type_conversion_p (TREE_TYPE (TREE_OPERAND (deref, 0)),
- TREE_TYPE (addr))
- && useless_type_conversion_p (TREE_TYPE (deref),
- TREE_TYPE (TREE_OPERAND (addr, 0))));
-}
-
-
-/* A subroutine of fold_stmt. Attempts to fold *(A+O) to A[X].
- BASE is an array type. OFFSET is a byte displacement.
-
- LOC is the location of the original expression. */
-
-static tree
-maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
+static bool
+can_refer_decl_in_current_unit_p (tree decl)
{
- tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
- tree array_type, elt_type, elt_size;
- tree domain_type;
-
- /* If BASE is an ARRAY_REF, we can pick up another offset (this time
- measured in units of the size of elements type) from that ARRAY_REF).
- We can't do anything if either is variable.
-
- The case we handle here is *(&A[N]+O). */
- if (TREE_CODE (base) == ARRAY_REF)
- {
- tree low_bound = array_ref_low_bound (base);
-
- elt_offset = TREE_OPERAND (base, 1);
- if (TREE_CODE (low_bound) != INTEGER_CST
- || TREE_CODE (elt_offset) != INTEGER_CST)
- return NULL_TREE;
-
- elt_offset = int_const_binop (MINUS_EXPR, elt_offset, low_bound, 0);
- base = TREE_OPERAND (base, 0);
- }
-
- /* Ignore stupid user tricks of indexing non-array variables. */
- array_type = TREE_TYPE (base);
- if (TREE_CODE (array_type) != ARRAY_TYPE)
- return NULL_TREE;
- elt_type = TREE_TYPE (array_type);
-
- /* Use signed size type for intermediate computation on the index. */
- idx_type = ssizetype;
-
- /* If OFFSET and ELT_OFFSET are zero, we don't care about the size of the
- element type (so we can use the alignment if it's not constant).
- Otherwise, compute the offset as an index by using a division. If the
- division isn't exact, then don't do anything. */
- elt_size = TYPE_SIZE_UNIT (elt_type);
- if (!elt_size)
- return NULL;
- if (integer_zerop (offset))
- {
- if (TREE_CODE (elt_size) != INTEGER_CST)
- elt_size = size_int (TYPE_ALIGN (elt_type));
+ struct varpool_node *vnode;
+ struct cgraph_node *node;
- idx = build_int_cst (idx_type, 0);
- }
- else
+ if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
+ return true;
+ /* External flag is set, so we deal with a C++ reference
+ to a static object from another file. */
+ if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
+ && TREE_CODE (decl) == VAR_DECL)
{
- unsigned HOST_WIDE_INT lquo, lrem;
- HOST_WIDE_INT hquo, hrem;
- double_int soffset;
-
- /* The final array offset should be signed, so we need
- to sign-extend the (possibly pointer) offset here
- and use signed division. */
- soffset = double_int_sext (tree_to_double_int (offset),
- TYPE_PRECISION (TREE_TYPE (offset)));
- if (TREE_CODE (elt_size) != INTEGER_CST
- || div_and_round_double (TRUNC_DIV_EXPR, 0,
- soffset.low, soffset.high,
- TREE_INT_CST_LOW (elt_size),
- TREE_INT_CST_HIGH (elt_size),
- &lquo, &hquo, &lrem, &hrem)
- || lrem || hrem)
- return NULL_TREE;
-
- idx = build_int_cst_wide (idx_type, lquo, hquo);
+ /* Just be sure it is not a bug in the frontend setting
+ flags incorrectly. Those variables should never
+ be finalized. */
+ gcc_checking_assert (!(vnode = varpool_get_node (decl))
+ || !vnode->finalized);
+ return false;
}
-
- /* Assume the low bound is zero. If there is a domain type, get the
- low bound, if any, convert the index into that type, and add the
- low bound. */
- min_idx = build_int_cst (idx_type, 0);
- domain_type = TYPE_DOMAIN (array_type);
- if (domain_type)
+ /* When the function is public, we can always introduce a new
+ reference. The exceptions are COMDAT functions, where introducing
+ a direct reference implies the need to include the function body
+ in the current unit. */
+ if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
+ return true;
+ /* We are not at the ltrans stage, so don't worry about WHOPR.
+ Also, while still gimplifying, all referred comdat functions will
+ be produced.
+ ??? As observed in PR20991 for already optimized out comdat virtual functions
+ we may not necessarily give up, because the copy will be output elsewhere when
+ the corresponding vtable is output. */
+ if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
+ return true;
+ /* If we already output the function body, we are safe. */
+ if (TREE_ASM_WRITTEN (decl))
+ return true;
+ if (TREE_CODE (decl) == FUNCTION_DECL)
{
- idx_type = domain_type;
- if (TYPE_MIN_VALUE (idx_type))
- min_idx = TYPE_MIN_VALUE (idx_type);
- else
- min_idx = fold_convert (idx_type, min_idx);
-
- if (TREE_CODE (min_idx) != INTEGER_CST)
- return NULL_TREE;
-
- elt_offset = fold_convert (idx_type, elt_offset);
+ node = cgraph_get_node (decl);
+ /* Check that we still have the function body and that we have not
+ yet decided to eliminate the offline copy of the function. The
+ second check is important when devirtualization happens during the
+ final compilation stage, when making a new reference no longer
+ causes the callee to be compiled. */
+ if (!node || !node->analyzed || node->global.inlined_to)
+ return false;
}
-
- if (!integer_zerop (min_idx))
- idx = int_const_binop (PLUS_EXPR, idx, min_idx, 0);
- if (!integer_zerop (elt_offset))
- idx = int_const_binop (PLUS_EXPR, idx, elt_offset, 0);
-
- /* Make sure to possibly truncate late after offsetting. */
- idx = fold_convert (idx_type, idx);
-
- /* We don't want to construct access past array bounds. For example
- char *(c[4]);
- c[3][2];
- should not be simplified into (*c)[14] or tree-vrp will
- give false warnings.
- This is only an issue for multi-dimensional arrays. */
- if (TREE_CODE (elt_type) == ARRAY_TYPE
- && domain_type)
+ else if (TREE_CODE (decl) == VAR_DECL)
{
- if (TYPE_MAX_VALUE (domain_type)
- && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
- && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
- return NULL_TREE;
- else if (TYPE_MIN_VALUE (domain_type)
- && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
- && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
- return NULL_TREE;
- else if (compare_tree_int (idx, 0) < 0)
- return NULL_TREE;
+ vnode = varpool_get_node (decl);
+ if (!vnode || !vnode->finalized)
+ return false;
}
-
- {
- tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
- SET_EXPR_LOCATION (t, loc);
- return t;
- }
-}
-
-
-/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
- LOC is the location of original expression.
-
- Before attempting the conversion strip off existing ADDR_EXPRs. */
-
-tree
-maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
- tree orig_type)
-{
- tree ret;
-
- STRIP_NOPS (base);
- if (TREE_CODE (base) != ADDR_EXPR)
- return NULL_TREE;
-
- base = TREE_OPERAND (base, 0);
- if (types_compatible_p (orig_type, TREE_TYPE (base))
- && integer_zerop (offset))
- return base;
-
- ret = maybe_fold_offset_to_array_ref (loc, base, offset);
- if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
- return ret;
- return NULL_TREE;
+ return true;
}
-/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
- LOC is the location of the original expression. */
+/* CVAL is a value taken from the DECL_INITIAL of a variable. Try to
+ transform it into a form acceptable to is_gimple_min_invariant. */
tree
-maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
- tree orig_type)
+canonicalize_constructor_val (tree cval)
{
- tree base, ret;
-
- STRIP_NOPS (addr);
- if (TREE_CODE (addr) != ADDR_EXPR)
- return NULL_TREE;
- base = TREE_OPERAND (addr, 0);
- ret = maybe_fold_offset_to_array_ref (loc, base, offset);
- if (ret)
+ STRIP_NOPS (cval);
+ if (TREE_CODE (cval) == POINTER_PLUS_EXPR
+ && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
{
- ret = build_fold_addr_expr (ret);
- if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
- return NULL_TREE;
- SET_EXPR_LOCATION (ret, loc);
+ tree ptr = TREE_OPERAND (cval, 0);
+ if (is_gimple_min_invariant (ptr))
+ cval = build1_loc (EXPR_LOCATION (cval),
+ ADDR_EXPR, TREE_TYPE (ptr),
+ fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
+ ptr,
+ fold_convert (ptr_type_node,
+ TREE_OPERAND (cval, 1))));
}
+ if (TREE_CODE (cval) == ADDR_EXPR)
+ {
+ tree base = get_base_address (TREE_OPERAND (cval, 0));
- return ret;
+ if (base
+ && (TREE_CODE (base) == VAR_DECL
+ || TREE_CODE (base) == FUNCTION_DECL)
+ && !can_refer_decl_in_current_unit_p (base))
+ return NULL_TREE;
+ if (cfun && base && TREE_CODE (base) == VAR_DECL)
+ add_referenced_var (base);
+ /* Fixup types in global initializers. */
+ if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
+ cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
+ }
+ return cval;
}
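+
+/* For example, schematically (in GIMPLE dump notation) the
+ canonicalization above turns
+
+ &a p+ 4 into &MEM[&a + 4]
+
+ where the latter form is acceptable to is_gimple_min_invariant
+ while the POINTER_PLUS_EXPR form is not. */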
-
-/* A quaint feature extant in our address arithmetic is that there
- can be hidden type changes here. The type of the result need
- not be the same as the type of the input pointer.
-
- What we're after here is an expression of the form
- (T *)(&array + const)
- where array is OP0, const is OP1, RES_TYPE is T and
- the cast doesn't actually exist, but is implicit in the
- type of the POINTER_PLUS_EXPR. We'd like to turn this into
- &array[x]
- which may be able to propagate further. */
+/* If SYM is a constant variable with known value, return the value.
+ NULL_TREE is returned otherwise. */
tree
-maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
+get_symbol_constant_value (tree sym)
{
- tree ptd_type;
- tree t;
-
- /* The first operand should be an ADDR_EXPR. */
- if (TREE_CODE (op0) != ADDR_EXPR)
- return NULL_TREE;
- op0 = TREE_OPERAND (op0, 0);
-
- /* It had better be a constant. */
- if (TREE_CODE (op1) != INTEGER_CST)
+ if (const_value_known_p (sym))
{
- /* Or op0 should now be A[0] and the non-constant offset defined
- via a multiplication by the array element size. */
- if (TREE_CODE (op0) == ARRAY_REF
- /* As we will end up creating a variable index array access
- in the outermost array dimension make sure there isn't
- a more inner array that the index could overflow to. */
- && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
- && integer_zerop (TREE_OPERAND (op0, 1))
- && TREE_CODE (op1) == SSA_NAME)
- {
- gimple offset_def = SSA_NAME_DEF_STMT (op1);
- tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
- if (!host_integerp (elsz, 1)
- || !is_gimple_assign (offset_def))
- return NULL_TREE;
-
- /* Do not build array references of something that we can't
- see the true number of array dimensions for. */
- if (!DECL_P (TREE_OPERAND (op0, 0))
- && !handled_component_p (TREE_OPERAND (op0, 0)))
- return NULL_TREE;
-
- if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
- && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
- && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
- return build_fold_addr_expr
- (build4 (ARRAY_REF, TREE_TYPE (op0),
- TREE_OPERAND (op0, 0),
- gimple_assign_rhs1 (offset_def),
- TREE_OPERAND (op0, 2),
- TREE_OPERAND (op0, 3)));
- else if (integer_onep (elsz)
- && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
- return build_fold_addr_expr
- (build4 (ARRAY_REF, TREE_TYPE (op0),
- TREE_OPERAND (op0, 0),
- op1,
- TREE_OPERAND (op0, 2),
- TREE_OPERAND (op0, 3)));
- }
- else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
- /* Dto. */
- && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
- && TREE_CODE (op1) == SSA_NAME)
+ tree val = DECL_INITIAL (sym);
+ if (val)
{
- gimple offset_def = SSA_NAME_DEF_STMT (op1);
- tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
- if (!host_integerp (elsz, 1)
- || !is_gimple_assign (offset_def))
- return NULL_TREE;
-
- /* Do not build array references of something that we can't
- see the true number of array dimensions for. */
- if (!DECL_P (op0)
- && !handled_component_p (op0))
+ val = canonicalize_constructor_val (val);
+ if (val && is_gimple_min_invariant (val))
+ return val;
+ else
return NULL_TREE;
-
- if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
- && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
- && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
- return build_fold_addr_expr
- (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
- op0, gimple_assign_rhs1 (offset_def),
- integer_zero_node, NULL_TREE));
- else if (integer_onep (elsz)
- && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
- return build_fold_addr_expr
- (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
- op0, op1,
- integer_zero_node, NULL_TREE));
- }
-
- return NULL_TREE;
- }
-
- /* If the first operand is an ARRAY_REF, expand it so that we can fold
- the offset into it. */
- while (TREE_CODE (op0) == ARRAY_REF)
- {
- tree array_obj = TREE_OPERAND (op0, 0);
- tree array_idx = TREE_OPERAND (op0, 1);
- tree elt_type = TREE_TYPE (op0);
- tree elt_size = TYPE_SIZE_UNIT (elt_type);
- tree min_idx;
-
- if (TREE_CODE (array_idx) != INTEGER_CST)
- break;
- if (TREE_CODE (elt_size) != INTEGER_CST)
- break;
-
- /* Un-bias the index by the min index of the array type. */
- min_idx = TYPE_DOMAIN (TREE_TYPE (array_obj));
- if (min_idx)
- {
- min_idx = TYPE_MIN_VALUE (min_idx);
- if (min_idx)
- {
- if (TREE_CODE (min_idx) != INTEGER_CST)
- break;
-
- array_idx = fold_convert (TREE_TYPE (min_idx), array_idx);
- if (!integer_zerop (min_idx))
- array_idx = int_const_binop (MINUS_EXPR, array_idx,
- min_idx, 0);
- }
}
-
- /* Convert the index to a byte offset. */
- array_idx = fold_convert (sizetype, array_idx);
- array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
-
- /* Update the operands for the next round, or for folding. */
- op1 = int_const_binop (PLUS_EXPR,
- array_idx, op1, 0);
- op0 = array_obj;
+ /* Variables declared 'const' without an initializer
+ have zero as the initializer if they may not be
+ overridden at link or run time. */
+ if (!val
+ && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
+ || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
+ return build_zero_cst (TREE_TYPE (sym));
}
- ptd_type = TREE_TYPE (res_type);
- /* If we want a pointer to void, reconstruct the reference from the
- array element type. A pointer to that can be trivially converted
- to void *. This happens as we fold (void *)(ptr p+ off). */
- if (VOID_TYPE_P (ptd_type)
- && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
- ptd_type = TREE_TYPE (TREE_TYPE (op0));
+ return NULL_TREE;
+}
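+
+/* For example (a sketch):
+
+ static const int x = 42; // uses of x fold to 42
+ static const int y; // zero-initialized: uses of y fold to 0
+
+ provided const_value_known_p accepts the declarations. */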
- /* At which point we can try some of the same things as for indirects. */
- t = maybe_fold_offset_to_array_ref (loc, op0, op1);
- if (t)
- {
- t = build_fold_addr_expr (t);
- if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
- return NULL_TREE;
- SET_EXPR_LOCATION (t, loc);
- }
- return t;
-}
/* Subroutine of fold_stmt. We perform several simplifications of the
memory reference tree EXPR and make sure to re-gimplify them properly
maybe_fold_reference (tree expr, bool is_lhs)
{
tree *t = &expr;
+ tree result;
+
+ if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
+ || TREE_CODE (expr) == REALPART_EXPR
+ || TREE_CODE (expr) == IMAGPART_EXPR)
+ && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
+ return fold_unary_loc (EXPR_LOCATION (expr),
+ TREE_CODE (expr),
+ TREE_TYPE (expr),
+ TREE_OPERAND (expr, 0));
+ else if (TREE_CODE (expr) == BIT_FIELD_REF
+ && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
+ return fold_ternary_loc (EXPR_LOCATION (expr),
+ TREE_CODE (expr),
+ TREE_TYPE (expr),
+ TREE_OPERAND (expr, 0),
+ TREE_OPERAND (expr, 1),
+ TREE_OPERAND (expr, 2));
- if (TREE_CODE (expr) == ARRAY_REF
- && !is_lhs)
- {
- tree tem = fold_read_from_constant_string (expr);
- if (tem)
- return tem;
- }
+ while (handled_component_p (*t))
+ t = &TREE_OPERAND (*t, 0);
- /* ??? We might want to open-code the relevant remaining cases
- to avoid using the generic fold. */
- if (handled_component_p (*t)
- && CONSTANT_CLASS_P (TREE_OPERAND (*t, 0)))
+ /* Canonicalize a MEM_REF's invariant address operand. Do this first
+ to avoid feeding non-canonical MEM_REFs elsewhere. */
+ if (TREE_CODE (*t) == MEM_REF
+ && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
{
- tree tem = fold (*t);
- if (tem != *t)
- return tem;
+ bool volatile_p = TREE_THIS_VOLATILE (*t);
+ tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
+ TREE_OPERAND (*t, 0),
+ TREE_OPERAND (*t, 1));
+ if (tem)
+ {
+ TREE_THIS_VOLATILE (tem) = volatile_p;
+ *t = tem;
+ tem = maybe_fold_reference (expr, is_lhs);
+ if (tem)
+ return tem;
+ return expr;
+ }
}
- while (handled_component_p (*t))
- t = &TREE_OPERAND (*t, 0);
+ if (!is_lhs
+ && (result = fold_const_aggregate_ref (expr))
+ && is_gimple_min_invariant (result))
+ return result;
/* Fold back MEM_REFs to reference trees. */
if (TREE_CODE (*t) == MEM_REF
compatibility. */
&& types_compatible_p (TREE_TYPE (*t),
TREE_TYPE (TREE_OPERAND
- (TREE_OPERAND (*t, 0), 0))))
+ (TREE_OPERAND (*t, 0), 0))))
{
tree tem;
*t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
return tem;
return expr;
}
- /* Canonicalize MEM_REFs invariant address operand. */
- else if (TREE_CODE (*t) == MEM_REF
- && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
- && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
- && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
- {
- tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
- TREE_OPERAND (*t, 0),
- TREE_OPERAND (*t, 1));
- if (tem)
- {
- *t = tem;
- tem = maybe_fold_reference (expr, is_lhs);
- if (tem)
- return tem;
- return expr;
- }
- }
else if (TREE_CODE (*t) == TARGET_MEM_REF)
{
tree tem = maybe_fold_tmr (*t);
return expr;
}
}
- else if (!is_lhs
- && DECL_P (*t))
- {
- tree tem = get_symbol_constant_value (*t);
- if (tem
- && useless_type_conversion_p (TREE_TYPE (*t), TREE_TYPE (tem)))
- {
- *t = unshare_expr (tem);
- tem = maybe_fold_reference (expr, is_lhs);
- if (tem)
- return tem;
- return expr;
- }
- }
return NULL_TREE;
}
{
tree rhs = gimple_assign_rhs1 (stmt);
- /* Try to fold a conditional expression. */
- if (TREE_CODE (rhs) == COND_EXPR)
- {
- tree op0 = COND_EXPR_COND (rhs);
- tree tem;
- bool set = false;
- location_t cond_loc = EXPR_LOCATION (rhs);
-
- if (COMPARISON_CLASS_P (op0))
- {
- fold_defer_overflow_warnings ();
- tem = fold_binary_loc (cond_loc,
- TREE_CODE (op0), TREE_TYPE (op0),
- TREE_OPERAND (op0, 0),
- TREE_OPERAND (op0, 1));
- /* This is actually a conditional expression, not a GIMPLE
- conditional statement, however, the valid_gimple_rhs_p
- test still applies. */
- set = (tem && is_gimple_condexpr (tem)
- && valid_gimple_rhs_p (tem));
- fold_undefer_overflow_warnings (set, stmt, 0);
- }
- else if (is_gimple_min_invariant (op0))
- {
- tem = op0;
- set = true;
- }
- else
- return NULL_TREE;
-
- if (set)
- result = fold_build3_loc (cond_loc, COND_EXPR, TREE_TYPE (rhs), tem,
- COND_EXPR_THEN (rhs), COND_EXPR_ELSE (rhs));
- }
-
- else if (REFERENCE_CLASS_P (rhs))
+ if (REFERENCE_CLASS_P (rhs))
return maybe_fold_reference (rhs, false);
else if (TREE_CODE (rhs) == ADDR_EXPR)
if (valid_gimple_rhs_p (result))
return result;
}
- else if (CONVERT_EXPR_CODE_P (subcode)
- && POINTER_TYPE_P (gimple_expr_type (stmt))
- && POINTER_TYPE_P (TREE_TYPE (gimple_assign_rhs1 (stmt))))
- {
- tree type = gimple_expr_type (stmt);
- tree t = maybe_fold_offset_to_address (loc,
- gimple_assign_rhs1 (stmt),
- integer_zero_node, type);
- if (t)
- return t;
- }
}
break;
case GIMPLE_BINARY_RHS:
- /* Try to fold pointer addition. */
- if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
- {
- tree type = TREE_TYPE (gimple_assign_rhs1 (stmt));
- if (TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
+ /* Try to canonicalize for boolean-typed X the comparisons
+ X == 0, X == 1, X != 0, and X != 1. */
+ if (gimple_assign_rhs_code (stmt) == EQ_EXPR
+ || gimple_assign_rhs_code (stmt) == NE_EXPR)
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+ tree op1 = gimple_assign_rhs1 (stmt);
+ tree op2 = gimple_assign_rhs2 (stmt);
+ tree type = TREE_TYPE (op1);
+
+ /* Check whether the comparison operands are of the same boolean
+ type as the result type.
+ Check that the second operand is an integer constant with value
+ one or zero. */
+ if (TREE_CODE (op2) == INTEGER_CST
+ && (integer_zerop (op2) || integer_onep (op2))
+ && useless_type_conversion_p (TREE_TYPE (lhs), type))
{
- type = build_pointer_type (TREE_TYPE (TREE_TYPE (type)));
- if (!useless_type_conversion_p
- (TREE_TYPE (gimple_assign_lhs (stmt)), type))
- type = TREE_TYPE (gimple_assign_rhs1 (stmt));
+ enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
+ bool is_logical_not = false;
+
+ /* X == 0 and X != 1 is a logical-not of X;
+ X == 1 and X != 0 is X itself. */
+ if ((cmp_code == EQ_EXPR && integer_zerop (op2))
+ || (cmp_code == NE_EXPR && integer_onep (op2)))
+ is_logical_not = true;
+
+ if (is_logical_not == false)
+ result = op1;
+ /* The transformation !X -> ~X is valid only for X of
+ one-bit precision. */
+ else if (TYPE_PRECISION (type) == 1)
+ result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
+ type, op1);
+ /* Otherwise we use !X -> X ^ 1. */
+ else
+ result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
+ type, op1, build_int_cst (type, 1));
+
}
- result = maybe_fold_stmt_addition (gimple_location (stmt),
- type,
- gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt));
}
if (!result)
result = fold_binary_loc (loc, subcode,
- TREE_TYPE (gimple_assign_lhs (stmt)),
- gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt));
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt));
if (result)
{
STRIP_USELESS_TYPE_CONVERSION (result);
if (valid_gimple_rhs_p (result))
return result;
-
- /* Fold might have produced non-GIMPLE, so if we trust it blindly
- we lose canonicalization opportunities. Do not go again
- through fold here though, or the same non-GIMPLE will be
- produced. */
- if (commutative_tree_code (subcode)
- && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt), false))
- return build2 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
- gimple_assign_rhs2 (stmt),
- gimple_assign_rhs1 (stmt));
}
break;
case GIMPLE_TERNARY_RHS:
- result = fold_ternary_loc (loc, subcode,
- TREE_TYPE (gimple_assign_lhs (stmt)),
- gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt),
- gimple_assign_rhs3 (stmt));
+ /* Try to fold a conditional expression. */
+ if (gimple_assign_rhs_code (stmt) == COND_EXPR)
+ {
+ tree op0 = gimple_assign_rhs1 (stmt);
+ tree tem;
+ bool set = false;
+ location_t cond_loc = gimple_location (stmt);
+
+ if (COMPARISON_CLASS_P (op0))
+ {
+ fold_defer_overflow_warnings ();
+ tem = fold_binary_loc (cond_loc,
+ TREE_CODE (op0), TREE_TYPE (op0),
+ TREE_OPERAND (op0, 0),
+ TREE_OPERAND (op0, 1));
+ /* This is actually a conditional expression, not a GIMPLE
+ conditional statement, however, the valid_gimple_rhs_p
+ test still applies. */
+ set = (tem && is_gimple_condexpr (tem)
+ && valid_gimple_rhs_p (tem));
+ fold_undefer_overflow_warnings (set, stmt, 0);
+ }
+ else if (is_gimple_min_invariant (op0))
+ {
+ tem = op0;
+ set = true;
+ }
+ else
+ return NULL_TREE;
+
+ if (set)
+ result = fold_build3_loc (cond_loc, COND_EXPR,
+ TREE_TYPE (gimple_assign_lhs (stmt)), tem,
+ gimple_assign_rhs2 (stmt),
+ gimple_assign_rhs3 (stmt));
+ }
+
+ if (!result)
+ result = fold_ternary_loc (loc, subcode,
+ TREE_TYPE (gimple_assign_lhs (stmt)),
+ gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt),
+ gimple_assign_rhs3 (stmt));
if (result)
{
STRIP_USELESS_TYPE_CONVERSION (result);
if (valid_gimple_rhs_p (result))
return result;
-
- /* Fold might have produced non-GIMPLE, so if we trust it blindly
- we lose canonicalization opportunities. Do not go again
- through fold here though, or the same non-GIMPLE will be
- produced. */
- if (commutative_ternary_tree_code (subcode)
- && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt), false))
- return build3 (subcode, TREE_TYPE (gimple_assign_lhs (stmt)),
- gimple_assign_rhs2 (stmt),
- gimple_assign_rhs1 (stmt),
- gimple_assign_rhs3 (stmt));
}
break;
reaching_vuse = gimple_vuse (stmt);
push_gimplify_context (&gctx);
+ gctx.into_ssa = gimple_in_ssa_p (cfun);
if (lhs == NULL_TREE)
- gimplify_and_add (expr, &stmts);
+ {
+ gimplify_and_add (expr, &stmts);
+ /* We can end up folding a memcpy of an empty class assignment,
+ which gets optimized away by C++ gimplification. */
+ if (gimple_seq_empty_p (stmts))
+ {
+ pop_gimplify_context (NULL);
+ if (gimple_in_ssa_p (cfun))
+ {
+ unlink_stmt_vdef (stmt);
+ release_defs (stmt);
+ }
+ gsi_remove (si_p, true);
+ return;
+ }
+ }
else
tmp = get_initialized_tmp_var (expr, &stmts, NULL);
if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
}
+ else if (reaching_vuse == gimple_vuse (stmt))
+ unlink_stmt_vdef (stmt);
}
gimple_set_location (new_stmt, gimple_location (stmt));
/* If the result is not a valid gimple value, or not a cast
of a valid gimple value, then we cannot use the result. */
if (is_gimple_val (new_val)
- || (is_gimple_cast (new_val)
+ || (CONVERT_EXPR_P (new_val)
&& is_gimple_val (TREE_OPERAND (new_val, 0))))
return new_val;
}
return result;
}
-/* Return the first of the base binfos of BINFO that has virtual functions. */
+/* Generate code adjusting the first parameter of the call statement
+ pointed to by GSI by DELTA. */
-static tree
-get_first_base_binfo_with_virtuals (tree binfo)
+void
+gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
{
- int i;
- tree base_binfo;
-
- for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
- if (BINFO_VIRTUALS (base_binfo))
- return base_binfo;
-
- return NULL_TREE;
+ gimple call_stmt = gsi_stmt (*gsi);
+ tree parm, tmp;
+ gimple new_stmt;
+
+ delta = convert_to_ptrofftype (delta);
+ gcc_assert (gimple_call_num_args (call_stmt) >= 1);
+ parm = gimple_call_arg (call_stmt, 0);
+ gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
+ tmp = create_tmp_var (TREE_TYPE (parm), NULL);
+ add_referenced_var (tmp);
+
+ tmp = make_ssa_name (tmp, NULL);
+ new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
+ SSA_NAME_DEF_STMT (tmp) = new_stmt;
+ gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
+ gimple_call_set_arg (call_stmt, 0, tmp);
}
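+
+/* For example, for a call foo (this_2, x_3) and DELTA 8, the code
+ emitted above amounts to (a sketch; D.1234_5 stands for the fresh
+ temporary):
+
+ D.1234_5 = this_2 + 8;
+ foo (D.1234_5, x_3); */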
+/* Return a binfo to be used for devirtualization of calls based on an object
+ represented by a declaration (i.e. a global or automatically allocated one)
+ or NULL if it cannot be found or is not safe. CST is expected to be an
+ ADDR_EXPR of such an object, or the function will return NULL. Currently it
+ is safe to use such a binfo only if it has no base binfo (i.e. no ancestors). */
-/* Search for a base binfo of BINFO that corresponds to TYPE and return it if
- it is found or NULL_TREE if it is not. */
-
-static tree
-get_base_binfo_for_type (tree binfo, tree type)
+tree
+gimple_extract_devirt_binfo_from_cst (tree cst)
{
- int i;
- tree base_binfo;
- tree res = NULL_TREE;
+ HOST_WIDE_INT offset, size, max_size;
+ tree base, type, expected_type, binfo;
+ bool last_artificial = false;
- for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
- if (TREE_TYPE (base_binfo) == type)
- {
- gcc_assert (!res);
- res = base_binfo;
- }
-
- return res;
-}
+ if (!flag_devirtualize
+ || TREE_CODE (cst) != ADDR_EXPR
+ || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
+ return NULL_TREE;
-/* Return a binfo describing the part of object referenced by expression REF.
- Return NULL_TREE if it cannot be determined. REF can consist of a series of
- COMPONENT_REFs of a declaration or of an INDIRECT_REF or it can also be just
- a simple declaration, indirect reference or an SSA_NAME. If the function
- discovers an INDIRECT_REF or an SSA_NAME, it will assume that the
- encapsulating type is described by KNOWN_BINFO, if it is not NULL_TREE.
- Otherwise the first non-artificial field declaration or the base declaration
- will be examined to get the encapsulating type. */
+ cst = TREE_OPERAND (cst, 0);
+ expected_type = TREE_TYPE (cst);
+ base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
+ type = TREE_TYPE (base);
+ if (!DECL_P (base)
+ || max_size == -1
+ || max_size != size
+ || TREE_CODE (type) != RECORD_TYPE)
+ return NULL_TREE;
-tree
-gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
-{
+ /* Find the sub-object the constant actually refers to and mark whether it is
+ an artificial one (as opposed to a user-defined one). */
while (true)
{
- if (TREE_CODE (ref) == COMPONENT_REF)
- {
- tree par_type;
- tree binfo, base_binfo;
- tree field = TREE_OPERAND (ref, 1);
-
- if (!DECL_ARTIFICIAL (field))
- {
- tree type = TREE_TYPE (field);
- if (TREE_CODE (type) == RECORD_TYPE)
- return TYPE_BINFO (type);
- else
- return NULL_TREE;
- }
-
- par_type = TREE_TYPE (TREE_OPERAND (ref, 0));
- binfo = TYPE_BINFO (par_type);
- if (!binfo
- || BINFO_N_BASE_BINFOS (binfo) == 0)
- return NULL_TREE;
+ HOST_WIDE_INT pos, size;
+ tree fld;
- base_binfo = get_first_base_binfo_with_virtuals (binfo);
- if (base_binfo && BINFO_TYPE (base_binfo) != TREE_TYPE (field))
- {
- tree d_binfo;
+ if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
+ break;
+ if (offset < 0)
+ return NULL_TREE;
- d_binfo = gimple_get_relevant_ref_binfo (TREE_OPERAND (ref, 0),
- known_binfo);
- /* Get descendant binfo. */
- if (!d_binfo)
- return NULL_TREE;
- return get_base_binfo_for_type (d_binfo, TREE_TYPE (field));
- }
+ for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
+ {
+ if (TREE_CODE (fld) != FIELD_DECL)
+ continue;
- ref = TREE_OPERAND (ref, 0);
+ pos = int_bit_position (fld);
+ size = tree_low_cst (DECL_SIZE (fld), 1);
+ if (pos <= offset && (pos + size) > offset)
+ break;
}
- else if (DECL_P (ref) && TREE_CODE (TREE_TYPE (ref)) == RECORD_TYPE)
- return TYPE_BINFO (TREE_TYPE (ref));
- else if (known_binfo
- && (TREE_CODE (ref) == SSA_NAME
- || TREE_CODE (ref) == MEM_REF))
- return known_binfo;
- else
+ if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
return NULL_TREE;
+
+ last_artificial = DECL_ARTIFICIAL (fld);
+ type = TREE_TYPE (fld);
+ offset -= pos;
}
-}
-
-/* Fold a OBJ_TYPE_REF expression to the address of a function. TOKEN is
- integer form of OBJ_TYPE_REF_TOKEN of the reference expression. KNOWN_BINFO
- carries the binfo describing the true type of OBJ_TYPE_REF_OBJECT(REF). */
-
-tree
-gimple_fold_obj_type_ref_known_binfo (HOST_WIDE_INT token, tree known_binfo)
-{
- HOST_WIDE_INT i;
- tree v, fndecl;
- struct cgraph_node *node;
-
- v = BINFO_VIRTUALS (known_binfo);
- i = 0;
- while (i != token)
- {
- i += (TARGET_VTABLE_USES_DESCRIPTORS
- ? TARGET_VTABLE_USES_DESCRIPTORS : 1);
- v = TREE_CHAIN (v);
- }
-
- fndecl = TREE_VALUE (v);
- node = cgraph_get_node (fndecl);
- /* When cgraph node is missing and function is not public, we cannot
- devirtualize. This can happen in WHOPR when the actual method
- ends up in other partition, because we found devirtualization
- possibility too late. */
- if ((!node || (!node->analyzed && !node->in_other_partition))
- && (!TREE_PUBLIC (fndecl) || DECL_COMDAT (fndecl)))
- return NULL;
- return build_fold_addr_expr (fndecl);
-}
-
-
-/* Fold a OBJ_TYPE_REF expression to the address of a function. If KNOWN_TYPE
- is not NULL_TREE, it is the true type of the outmost encapsulating object if
- that comes from a pointer SSA_NAME. If the true outmost encapsulating type
- can be determined from a declaration OBJ_TYPE_REF_OBJECT(REF), it is used
- regardless of KNOWN_TYPE (which thus can be NULL_TREE). */
-
-tree
-gimple_fold_obj_type_ref (tree ref, tree known_type)
-{
- tree obj = OBJ_TYPE_REF_OBJECT (ref);
- tree known_binfo = known_type ? TYPE_BINFO (known_type) : NULL_TREE;
- tree binfo;
-
- if (TREE_CODE (obj) == ADDR_EXPR)
- obj = TREE_OPERAND (obj, 0);
-
- binfo = gimple_get_relevant_ref_binfo (obj, known_binfo);
- if (binfo)
- {
- HOST_WIDE_INT token = tree_low_cst (OBJ_TYPE_REF_TOKEN (ref), 1);
- return gimple_fold_obj_type_ref_known_binfo (token, binfo);
- }
- else
+ /* Artificial sub-objects are ancestors; we do not want to use them for
+ devirtualization, at least not here. */
+ if (last_artificial)
return NULL_TREE;
+ binfo = TYPE_BINFO (type);
+ if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
+ return NULL_TREE;
+ else
+ return binfo;
}
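+
+/* For example (a sketch, with devirtualization enabled):
+
+ struct A { virtual int f (); }; // no base classes
+ static A a;
+
+ for CST == &a the function above returns TYPE_BINFO of A, which the
+ caller can use to resolve a virtual call on a directly to A::f. */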
/* Attempt to fold a call statement referenced by the statement iterator GSI.
simplifies to a constant value. Return true if any changes were made.
It is assumed that the operands have been previously folded. */
-static bool
-fold_gimple_call (gimple_stmt_iterator *gsi)
+bool
+gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
gimple stmt = gsi_stmt (*gsi);
-
- tree callee = gimple_call_fndecl (stmt);
+ tree callee;
/* Check for builtins that CCP can handle using information not
available in the generic fold routines. */
- if (callee && DECL_BUILT_IN (callee))
+ callee = gimple_call_fndecl (stmt);
+ if (!inplace && callee && DECL_BUILT_IN (callee))
{
tree result = gimple_fold_builtin (stmt);
return true;
}
}
- else
+
+ /* Check for virtual calls that became direct calls. */
+ callee = gimple_call_fn (stmt);
+ if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
{
- /* ??? Should perhaps do this in fold proper. However, doing it
- there requires that we create a new CALL_EXPR, and that requires
- copying EH region info to the new node. Easier to just do it
- here where we can just smash the call operand. */
- /* ??? Is there a good reason not to do this in fold_stmt_inplace? */
- callee = gimple_call_fn (stmt);
- if (TREE_CODE (callee) == OBJ_TYPE_REF
- && TREE_CODE (OBJ_TYPE_REF_OBJECT (callee)) == ADDR_EXPR)
- {
- tree t;
+ tree binfo, fndecl, obj;
+ HOST_WIDE_INT token;
- t = gimple_fold_obj_type_ref (callee, NULL_TREE);
- if (t)
- {
- gimple_call_set_fn (stmt, t);
- return true;
- }
- }
+ if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
+ {
+ gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
+ return true;
+ }
+
+ obj = OBJ_TYPE_REF_OBJECT (callee);
+ binfo = gimple_extract_devirt_binfo_from_cst (obj);
+ if (!binfo)
+ return false;
+ token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
+ fndecl = gimple_get_virt_method_for_binfo (token, binfo);
+ if (!fndecl)
+ return false;
+ gimple_call_set_fndecl (stmt, fndecl);
+ return true;
}
return false;
bool changed = false;
gimple stmt = gsi_stmt (*gsi);
unsigned i;
+ gimple_stmt_iterator gsinext = *gsi;
+ gimple next_stmt;
+
+ gsi_next (&gsinext);
+ next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
/* Fold the main computation performed by the statement. */
switch (gimple_code (stmt))
case GIMPLE_ASSIGN:
{
unsigned old_num_ops = gimple_num_ops (stmt);
- tree new_rhs = fold_gimple_assign (gsi);
+ enum tree_code subcode = gimple_assign_rhs_code (stmt);
tree lhs = gimple_assign_lhs (stmt);
+ tree new_rhs;
+ /* First canonicalize operand order. This avoids building new
+ trees if this is the only thing fold would later do. */
+ if ((commutative_tree_code (subcode)
+ || commutative_ternary_tree_code (subcode))
+ && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
+ gimple_assign_rhs2 (stmt), false))
+ {
+ tree tem = gimple_assign_rhs1 (stmt);
+ gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
+ gimple_assign_set_rhs2 (stmt, tem);
+ changed = true;
+ }
+ new_rhs = fold_gimple_assign (gsi);
if (new_rhs
&& !useless_type_conversion_p (TREE_TYPE (lhs),
TREE_TYPE (new_rhs)))
changed = true;
}
}
- /* The entire statement may be replaced in this case. */
- if (!inplace)
- changed |= fold_gimple_call (gsi);
+ changed |= gimple_fold_call (gsi, inplace);
break;
case GIMPLE_ASM:
default:;
}
+ /* If stmt folds into nothing and it was the last stmt in a bb,
+ don't call gsi_stmt. */
+ if (gsi_end_p (*gsi))
+ {
+ gcc_assert (next_stmt == NULL);
+ return changed;
+ }
+
stmt = gsi_stmt (*gsi);
- /* Fold *& on the lhs. */
- if (gimple_has_lhs (stmt))
+ /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
+ as we'd be changing the next stmt. */
+ if (gimple_has_lhs (stmt) && stmt != next_stmt)
{
tree lhs = gimple_get_lhs (stmt);
if (lhs && REFERENCE_CLASS_P (lhs))
return fold_stmt_1 (gsi, false);
}
-/* Perform the minimal folding on statement STMT. Only operations like
+/* Perform the minimal folding on statement *GSI. Only operations like
*&x created by constant propagation are handled. The statement cannot
be replaced with a new one. Return true if the statement was
changed, false otherwise.
- The statement STMT should be in valid gimple form but may
+ The statement *GSI should be in valid gimple form but may
be in unfolded state as resulting from for example constant propagation
which can produce *&x = 0. */
bool
-fold_stmt_inplace (gimple stmt)
+fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
- gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
- bool changed = fold_stmt_1 (&gsi, true);
- gcc_assert (gsi_stmt (gsi) == stmt);
+ gimple stmt = gsi_stmt (*gsi);
+ bool changed = fold_stmt_1 (gsi, true);
+ gcc_assert (gsi_stmt (*gsi) == stmt);
return changed;
}
/* If the definition is an AND or OR expression, we may be able to
simplify by reassociating. */
- if (innercode == TRUTH_AND_EXPR
- || innercode == TRUTH_OR_EXPR
- || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
- && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
+ if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
+ && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
{
tree inner1 = gimple_assign_rhs1 (stmt);
tree inner2 = gimple_assign_rhs2 (stmt);
gimple s;
tree t;
tree partial = NULL_TREE;
- bool is_and = (innercode == TRUTH_AND_EXPR || innercode == BIT_AND_EXPR);
+ bool is_and = (innercode == BIT_AND_EXPR);
/* Check for boolean identities that don't require recursive examination
of inner1/inner2:
/* Handle the OR case, where we are redistributing:
(inner1 OR inner2) AND (op2a code2 op2b)
=> (t OR (inner2 AND (op2a code2 op2b))) */
- else
- {
- if (integer_onep (t))
- return boolean_true_node;
- else
- /* Save partial result for later. */
- partial = t;
- }
+ else if (integer_onep (t))
+ return boolean_true_node;
+
+ /* Save partial result for later. */
+ partial = t;
}
/* Compute the second partial result, (inner2 AND (op2a code op2b)) */
return inner1;
else if (integer_zerop (t))
return boolean_false_node;
+ /* If both are the same, we can apply the identity
+ (x AND x) == x. */
+ else if (partial && same_bool_result_p (t, partial))
+ return t;
}
/* Handle the OR case. where we are redistributing:
if (operand_equal_p (op1a, op2a, 0)
&& operand_equal_p (op1b, op2b, 0))
{
+ /* Result will be either NULL_TREE, or a combined comparison. */
tree t = combine_comparisons (UNKNOWN_LOCATION,
TRUTH_ANDIF_EXPR, code1, code2,
boolean_type_node, op1a, op1b);
if (operand_equal_p (op1a, op2b, 0)
&& operand_equal_p (op1b, op2a, 0))
{
+ /* Result will be either NULL_TREE, or a combined comparison. */
tree t = combine_comparisons (UNKNOWN_LOCATION,
TRUTH_ANDIF_EXPR, code1,
swap_tree_comparison (code2),
code2, op2a, op2b))
return NULL_TREE;
}
- else if (TREE_CODE (arg) == SSA_NAME)
+ else if (TREE_CODE (arg) == SSA_NAME
+ && !SSA_NAME_IS_DEFAULT_DEF (arg))
{
- tree temp = and_var_with_comparison (arg, invert,
- code2, op2a, op2b);
+ tree temp;
+ gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ /* In simple cases we can look through PHI nodes,
+ but we have to be careful with loops.
+ See PR49073. */
+ if (! dom_info_available_p (CDI_DOMINATORS)
+ || gimple_bb (def_stmt) == gimple_bb (stmt)
+ || dominated_by_p (CDI_DOMINATORS,
+ gimple_bb (def_stmt),
+ gimple_bb (stmt)))
+ return NULL_TREE;
+ temp = and_var_with_comparison (arg, invert, code2,
+ op2a, op2b);
if (!temp)
return NULL_TREE;
else if (!result)
/* If the definition is an AND or OR expression, we may be able to
simplify by reassociating. */
- if (innercode == TRUTH_AND_EXPR
- || innercode == TRUTH_OR_EXPR
- || (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
- && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR)))
+ if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
+ && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
{
tree inner1 = gimple_assign_rhs1 (stmt);
tree inner2 = gimple_assign_rhs2 (stmt);
gimple s;
tree t;
tree partial = NULL_TREE;
- bool is_or = (innercode == TRUTH_OR_EXPR || innercode == BIT_IOR_EXPR);
+ bool is_or = (innercode == BIT_IOR_EXPR);
/* Check for boolean identities that don't require recursive examination
of inner1/inner2:
=> (t OR inner2)
If the partial result t is a constant, we win. Otherwise
continue on to try reassociating with the other inner test. */
- if (innercode == TRUTH_OR_EXPR)
+ if (is_or)
{
if (integer_onep (t))
return boolean_true_node;
/* Handle the AND case, where we are redistributing:
(inner1 AND inner2) OR (op2a code2 op2b)
=> (t AND (inner2 OR (op2a code op2b))) */
- else
- {
- if (integer_zerop (t))
- return boolean_false_node;
- else
- /* Save partial result for later. */
- partial = t;
- }
+ else if (integer_zerop (t))
+ return boolean_false_node;
+
+ /* Save partial result for later. */
+ partial = t;
}
/* Compute the second partial result, (inner2 OR (op2a code op2b)) */
{
/* Handle the OR case, where we are reassociating:
(inner1 OR inner2) OR (op2a code2 op2b)
- => (inner1 OR t) */
- if (innercode == TRUTH_OR_EXPR)
+ => (inner1 OR t)
+ => (t OR partial) */
+ if (is_or)
{
if (integer_zerop (t))
return inner1;
else if (integer_onep (t))
return boolean_true_node;
+ /* If both are the same, we can apply the identity
+ (x OR x) == x. */
+ else if (partial && same_bool_result_p (t, partial))
+ return t;
}
/* Handle the AND case, where we are redistributing:
operand to the redistributed AND expression. The
interesting case is when at least one is true.
Or, if both are the same, we can apply the identity
- (x AND x) == true. */
+ (x AND x) == x. */
if (integer_onep (partial))
return t;
else if (integer_onep (t))
return partial;
else if (same_bool_result_p (t, partial))
- return boolean_true_node;
+ return t;
}
}
}
if (operand_equal_p (op1a, op2a, 0)
&& operand_equal_p (op1b, op2b, 0))
{
+ /* Result will be either NULL_TREE, or a combined comparison. */
tree t = combine_comparisons (UNKNOWN_LOCATION,
TRUTH_ORIF_EXPR, code1, code2,
boolean_type_node, op1a, op1b);
if (operand_equal_p (op1a, op2b, 0)
&& operand_equal_p (op1b, op2a, 0))
{
+ /* Result will be either NULL_TREE, or a combined comparison. */
tree t = combine_comparisons (UNKNOWN_LOCATION,
TRUTH_ORIF_EXPR, code1,
swap_tree_comparison (code2),
code2, op2a, op2b))
return NULL_TREE;
}
- else if (TREE_CODE (arg) == SSA_NAME)
+ else if (TREE_CODE (arg) == SSA_NAME
+ && !SSA_NAME_IS_DEFAULT_DEF (arg))
{
- tree temp = or_var_with_comparison (arg, invert,
- code2, op2a, op2b);
+ tree temp;
+ gimple def_stmt = SSA_NAME_DEF_STMT (arg);
+ /* In simple cases we can look through PHI nodes,
+ but we have to be careful with loops.
+ See PR49073. */
+ if (! dom_info_available_p (CDI_DOMINATORS)
+ || gimple_bb (def_stmt) == gimple_bb (stmt)
+ || dominated_by_p (CDI_DOMINATORS,
+ gimple_bb (def_stmt),
+ gimple_bb (stmt)))
+ return NULL_TREE;
+ temp = or_var_with_comparison (arg, invert, code2,
+ op2a, op2b);
if (!temp)
return NULL_TREE;
else if (!result)
else
return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
}
+
+
+/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
+
+ Either NULL_TREE, a simplified but non-constant expression, or a
+ constant is returned.
+
+ ??? This should go into a gimple-fold-inline.h file to be eventually
+ privatized with the single valueize function used in the various TUs
+ to avoid the indirect function call overhead. */
+
+tree
+gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
+{
+ location_t loc = gimple_location (stmt);
+ switch (gimple_code (stmt))
+ {
+ case GIMPLE_ASSIGN:
+ {
+ enum tree_code subcode = gimple_assign_rhs_code (stmt);
+
+ switch (get_gimple_rhs_class (subcode))
+ {
+ case GIMPLE_SINGLE_RHS:
+ {
+ tree rhs = gimple_assign_rhs1 (stmt);
+ enum tree_code_class kind = TREE_CODE_CLASS (subcode);
+
+ if (TREE_CODE (rhs) == SSA_NAME)
+ {
+ /* If the RHS is an SSA_NAME, return its known constant value,
+ if any. */
+ return (*valueize) (rhs);
+ }
+ /* Handle propagating invariant addresses into address
+ operations. */
+ else if (TREE_CODE (rhs) == ADDR_EXPR
+ && !is_gimple_min_invariant (rhs))
+ {
+ HOST_WIDE_INT offset;
+ tree base;
+ base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
+ &offset,
+ valueize);
+ if (base
+ && (CONSTANT_CLASS_P (base)
+ || decl_address_invariant_p (base)))
+ return build_invariant_address (TREE_TYPE (rhs),
+ base, offset);
+ }
+ else if (TREE_CODE (rhs) == CONSTRUCTOR
+ && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
+ && (CONSTRUCTOR_NELTS (rhs)
+ == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
+ {
+ unsigned i;
+ tree val, list;
+
+ list = NULL_TREE;
+ FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
+ {
+ val = (*valueize) (val);
+ if (TREE_CODE (val) == INTEGER_CST
+ || TREE_CODE (val) == REAL_CST
+ || TREE_CODE (val) == FIXED_CST)
+ list = tree_cons (NULL_TREE, val, list);
+ else
+ return NULL_TREE;
+ }
+
+ return build_vector (TREE_TYPE (rhs), nreverse (list));
+ }
+
+ if (kind == tcc_reference)
+ {
+ if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
+ || TREE_CODE (rhs) == REALPART_EXPR
+ || TREE_CODE (rhs) == IMAGPART_EXPR)
+ && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+ {
+ tree val = (*valueize) (TREE_OPERAND (rhs, 0));
+ return fold_unary_loc (EXPR_LOCATION (rhs),
+ TREE_CODE (rhs),
+ TREE_TYPE (rhs), val);
+ }
+ else if (TREE_CODE (rhs) == BIT_FIELD_REF
+ && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+ {
+ tree val = (*valueize) (TREE_OPERAND (rhs, 0));
+ return fold_ternary_loc (EXPR_LOCATION (rhs),
+ TREE_CODE (rhs),
+ TREE_TYPE (rhs), val,
+ TREE_OPERAND (rhs, 1),
+ TREE_OPERAND (rhs, 2));
+ }
+ else if (TREE_CODE (rhs) == MEM_REF
+ && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
+ {
+ tree val = (*valueize) (TREE_OPERAND (rhs, 0));
+ if (TREE_CODE (val) == ADDR_EXPR
+ && is_gimple_min_invariant (val))
+ {
+ tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
+ unshare_expr (val),
+ TREE_OPERAND (rhs, 1));
+ if (tem)
+ rhs = tem;
+ }
+ }
+ return fold_const_aggregate_ref_1 (rhs, valueize);
+ }
+ else if (kind == tcc_declaration)
+ return get_symbol_constant_value (rhs);
+ return rhs;
+ }
+
+ case GIMPLE_UNARY_RHS:
+ {
+ /* Handle unary operators that can appear in GIMPLE form.
+ Note that we know the single operand must be a constant,
+ so this should almost always return a simplified RHS. */
+ tree lhs = gimple_assign_lhs (stmt);
+ tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
+
+ /* Conversions are useless for CCP purposes if they are
+ value-preserving. Thus the restrictions that
+ useless_type_conversion_p places for restrict qualification
+ of pointer types should not apply here.
+ Substitution later will only substitute to allowed places. */
+ if (CONVERT_EXPR_CODE_P (subcode)
+ && POINTER_TYPE_P (TREE_TYPE (lhs))
+ && POINTER_TYPE_P (TREE_TYPE (op0))
+ && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
+ == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
+ return op0;
+
+ return
+ fold_unary_ignore_overflow_loc (loc, subcode,
+ gimple_expr_type (stmt), op0);
+ }
+
+ case GIMPLE_BINARY_RHS:
+ {
+ /* Handle binary operators that can appear in GIMPLE form. */
+ tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
+ tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
+
+ /* Translate &x + CST into an invariant form suitable for
+ further propagation. */
+ if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+ && TREE_CODE (op0) == ADDR_EXPR
+ && TREE_CODE (op1) == INTEGER_CST)
+ {
+ tree off = fold_convert (ptr_type_node, op1);
+ return build_fold_addr_expr_loc
+ (loc,
+ fold_build2 (MEM_REF,
+ TREE_TYPE (TREE_TYPE (op0)),
+ unshare_expr (op0), off));
+ }
+
+ return fold_binary_loc (loc, subcode,
+ gimple_expr_type (stmt), op0, op1);
+ }
+
+ case GIMPLE_TERNARY_RHS:
+ {
+ /* Handle ternary operators that can appear in GIMPLE form. */
+ tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
+ tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
+ tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
+
+ return fold_ternary_loc (loc, subcode,
+ gimple_expr_type (stmt), op0, op1, op2);
+ }
+
+ default:
+ gcc_unreachable ();
+ }
+ }
+
+ case GIMPLE_CALL:
+ {
+ tree fn;
+
+ if (gimple_call_internal_p (stmt))
+ /* No folding yet for these functions. */
+ return NULL_TREE;
+
+ fn = (*valueize) (gimple_call_fn (stmt));
+ if (TREE_CODE (fn) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
+ && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
+ {
+ tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
+ tree call, retval;
+ unsigned i;
+ for (i = 0; i < gimple_call_num_args (stmt); ++i)
+ args[i] = (*valueize) (gimple_call_arg (stmt, i));
+ call = build_call_array_loc (loc,
+ gimple_call_return_type (stmt),
+ fn, gimple_call_num_args (stmt), args);
+ retval = fold_call_expr (EXPR_LOCATION (call), call, false);
+ if (retval)
+ /* fold_call_expr wraps the result inside a NOP_EXPR. */
+ STRIP_NOPS (retval);
+ return retval;
+ }
+ return NULL_TREE;
+ }
+
+ default:
+ return NULL_TREE;
+ }
+}
+
+/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
+ Returns NULL_TREE if folding to a constant is not possible, otherwise
+ returns a constant according to is_gimple_min_invariant. */
+
+tree
+gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
+{
+ tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
+ if (res && is_gimple_min_invariant (res))
+ return res;
+ return NULL_TREE;
+}
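+
+/* For example, a propagation pass might wire this up as follows (a
+ sketch; lattice_constant_for is a hypothetical helper returning the
+ known constant for an SSA name, or NULL_TREE when nothing is known):
+
+ static tree
+ my_valueize (tree name)
+ {
+ tree val = lattice_constant_for (name);
+ return val ? val : name;
+ }
+
+ tree cst = gimple_fold_stmt_to_constant (stmt, my_valueize); */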
+
+
+/* The following set of functions are supposed to fold references using
+ their constant initializers. */
+
+static tree fold_ctor_reference (tree type, tree ctor,
+ unsigned HOST_WIDE_INT offset,
+ unsigned HOST_WIDE_INT size);
+
+/* See if we can find a constructor defining the value of BASE.
+ When we know the constructor at a constant offset (such as
+ BASE being array[40] where we know the constructor of array), then
+ BIT_OFFSET is adjusted accordingly.
+
+ As a special case, return error_mark_node when the constructor
+ is not explicitly available but is known to be zero,
+ as for 'static const int a;'. */
+static tree
+get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
+ tree (*valueize)(tree))
+{
+ HOST_WIDE_INT bit_offset2, size, max_size;
+ if (TREE_CODE (base) == MEM_REF)
+ {
+ if (!integer_zerop (TREE_OPERAND (base, 1)))
+ {
+ if (!host_integerp (TREE_OPERAND (base, 1), 0))
+ return NULL_TREE;
+ *bit_offset += (mem_ref_offset (base).low
+ * BITS_PER_UNIT);
+ }
+
+ if (valueize
+ && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
+ base = valueize (TREE_OPERAND (base, 0));
+ if (!base || TREE_CODE (base) != ADDR_EXPR)
+ return NULL_TREE;
+ base = TREE_OPERAND (base, 0);
+ }
+
+ /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
+ DECL_INITIAL. If BASE is a nested reference into another
+ ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
+ the inner reference. */
+ switch (TREE_CODE (base))
+ {
+ case VAR_DECL:
+ if (!const_value_known_p (base))
+ return NULL_TREE;
+
+ /* Fallthru. */
+ case CONST_DECL:
+ if (!DECL_INITIAL (base)
+ && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
+ return error_mark_node;
+ return DECL_INITIAL (base);
+
+ case ARRAY_REF:
+ case COMPONENT_REF:
+ base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
+ if (max_size == -1 || size != max_size)
+ return NULL_TREE;
+ *bit_offset += bit_offset2;
+ return get_base_constructor (base, bit_offset, valueize);
+
+ case STRING_CST:
+ case CONSTRUCTOR:
+ return base;
+
+ default:
+ return NULL_TREE;
+ }
+}
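+
+/* For example (a sketch): for BASE == MEM[&a + 4] with
+ static const int a[2] = { 1, 2 }, the code above adds
+ 4 * BITS_PER_UNIT to *BIT_OFFSET, strips the MEM_REF down to the
+ VAR_DECL a and returns its CONSTRUCTOR { 1, 2 }. */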
+
+/* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
+ to the memory at bit OFFSET.
+
+ We only do the simple job of folding byte accesses. */
+
+static tree
+fold_string_cst_ctor_reference (tree type, tree ctor,
+ unsigned HOST_WIDE_INT offset,
+ unsigned HOST_WIDE_INT size)
+{
+ if (INTEGRAL_TYPE_P (type)
+ && (TYPE_MODE (type)
+ == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+ && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+ == MODE_INT)
+ && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
+ && size == BITS_PER_UNIT
+ && !(offset % BITS_PER_UNIT))
+ {
+ offset /= BITS_PER_UNIT;
+ if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
+ return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
+ [offset]));
+ /* Folding
+ const char a[20]="hello";
+ return a[10];
+
+ might lead to an offset greater than the string length. In this case we
+ know the value is either initialized to 0 or out of bounds. Return 0
+ in both cases. */
+ return build_zero_cst (type);
+ }
+ return NULL_TREE;
+}
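+
+/* For example, with static const char a[20] = "hello", a one-byte read
+ at bit offset 8 (i.e. a[1]) passes the checks above and folds to the
+ character constant 'e'. */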
+
+/* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE
+ and size SIZE to the memory at bit OFFSET. */
+
+static tree
+fold_array_ctor_reference (tree type, tree ctor,
+ unsigned HOST_WIDE_INT offset,
+ unsigned HOST_WIDE_INT size)
+{
+ unsigned HOST_WIDE_INT cnt;
+ tree cfield, cval;
+ double_int low_bound, elt_size;
+ double_int index, max_index;
+ double_int access_index;
+ tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
+ HOST_WIDE_INT inner_offset;
+
+ /* Compute low bound and elt size. */
+ if (domain_type && TYPE_MIN_VALUE (domain_type))
+ {
+ /* Static constructors for variably sized objects make no sense. */
+ gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
+ low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
+ }
+ else
+ low_bound = double_int_zero;
+ /* Static constructors for variably sized objects make no sense. */
+ gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
+ == INTEGER_CST);
+ elt_size =
+ tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
+
+
+ /* We can handle only constant-sized accesses that are known not to
+ be larger than the size of the array element. */
+ if (!TYPE_SIZE_UNIT (type)
+ || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
+ || double_int_cmp (elt_size,
+ tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
+ return NULL_TREE;
+
+ /* Compute the array index we look for. */
+ access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
+ elt_size, TRUNC_DIV_EXPR);
+ access_index = double_int_add (access_index, low_bound);
+
+ /* And offset within the access. */
+ inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
+
+ /* See if the array field is large enough to span the whole access. We
+ do not care to fold accesses spanning multiple array indices. */
+ if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
+ return NULL_TREE;
+
+ index = double_int_sub (low_bound, double_int_one);
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
+ {
+ /* An array constructor may explicitly set the index, specify a range,
+ or leave the index NULL, meaning that it is the next index after the
+ previous one. */
+ if (cfield)
+ {
+ if (TREE_CODE (cfield) == INTEGER_CST)
+ max_index = index = tree_to_double_int (cfield);
+ else
+ {
+ gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
+ index = tree_to_double_int (TREE_OPERAND (cfield, 0));
+ max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
+ }
+ }
+ else
+ max_index = index = double_int_add (index, double_int_one);
+
+ /* Do we have a match? */
+ if (double_int_cmp (access_index, index, 1) >= 0
+ && double_int_cmp (access_index, max_index, 1) <= 0)
+ return fold_ctor_reference (type, cval, inner_offset, size);
+ }
+ /* When memory is not explicitly mentioned in the constructor,
+ it is 0 (or out of range). */
+ return build_zero_cst (type);
+}
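+
+/* For example, with the GNU range-designator initializer
+
+ static const int a[8] = { [0] = 1, [2 ... 5] = 7 };
+
+ a read of a[4] computes ACCESS_INDEX 4, matches the RANGE_EXPR for
+ [2 ... 5] and folds to 7, while a read of a[6] is not mentioned in the
+ CONSTRUCTOR and folds to 0. */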
+
+/* CTOR is a CONSTRUCTOR of an aggregate or vector. Fold a reference of
+ type TYPE and size SIZE to the memory at bit OFFSET. */
+
+static tree
+fold_nonarray_ctor_reference (tree type, tree ctor,
+ unsigned HOST_WIDE_INT offset,
+ unsigned HOST_WIDE_INT size)
+{
+ unsigned HOST_WIDE_INT cnt;
+ tree cfield, cval;
+
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
+ {
+ tree byte_offset = DECL_FIELD_OFFSET (cfield);
+ tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
+ tree field_size = DECL_SIZE (cfield);
+ double_int bitoffset;
+ double_int byte_offset_cst = tree_to_double_int (byte_offset);
+ double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
+ double_int bitoffset_end, access_end;
+
+ /* Variable sized objects in static constructors make no sense,
+ but field_size can be NULL for flexible array members. */
+ gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
+ && TREE_CODE (byte_offset) == INTEGER_CST
+ && (field_size != NULL_TREE
+ ? TREE_CODE (field_size) == INTEGER_CST
+ : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
+
+ /* Compute bit offset of the field. */
+ bitoffset = double_int_add (tree_to_double_int (field_offset),
+ double_int_mul (byte_offset_cst,
+ bits_per_unit_cst));
+ /* Compute bit offset where the field ends. */
+ if (field_size != NULL_TREE)
+ bitoffset_end = double_int_add (bitoffset,
+ tree_to_double_int (field_size));
+ else
+ bitoffset_end = double_int_zero;
+
+ access_end = double_int_add (uhwi_to_double_int (offset),
+ uhwi_to_double_int (size));
+
+ /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
+ [BITOFFSET, BITOFFSET_END)? */
+ if (double_int_cmp (access_end, bitoffset, 0) > 0
+ && (field_size == NULL_TREE
+ || double_int_cmp (uhwi_to_double_int (offset),
+ bitoffset_end, 0) < 0))
+ {
+ double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
+ bitoffset);
+ /* We do have overlap. Now see if field is large enough to
+ cover the access. Give up for accesses spanning multiple
+ fields. */
+ if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
+ return NULL_TREE;
+ if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
+ return NULL_TREE;
+ return fold_ctor_reference (type, cval,
+ double_int_to_uhwi (inner_offset), size);
+ }
+ }
+ /* When memory is not explicitly mentioned in the constructor, it is 0. */
+ return build_zero_cst (type);
+}
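+
+/* For example, with
+
+ static const struct { char c; int i; } s = { 'x', 42 };
+
+ on a typical target where I sits at byte offset 4, a 32-bit read at
+ bit OFFSET 32 overlaps exactly the FIELD_DECL I and folds to 42, while
+ a read straddling C and I would span multiple fields and is given
+ up on. */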
+
+/* CTOR is a value initializing memory. Fold a reference of type TYPE and
+ size SIZE to the memory at bit OFFSET. */
+
+static tree
+fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
+ unsigned HOST_WIDE_INT size)
+{
+ tree ret;
+
+ /* We found the field with an exact match. */
+ if (useless_type_conversion_p (type, TREE_TYPE (ctor))
+ && !offset)
+ return canonicalize_constructor_val (ctor);
+
+ /* We are at the end of the walk; see if we can view-convert the
+ result. */
+ if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
+ /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
+ && operand_equal_p (TYPE_SIZE (type),
+ TYPE_SIZE (TREE_TYPE (ctor)), 0))
+ {
+ ret = canonicalize_constructor_val (ctor);
+ ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
+ if (ret)
+ STRIP_NOPS (ret);
+ return ret;
+ }
+ if (TREE_CODE (ctor) == STRING_CST)
+ return fold_string_cst_ctor_reference (type, ctor, offset, size);
+ if (TREE_CODE (ctor) == CONSTRUCTOR)
+ {
+ if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
+ return fold_array_ctor_reference (type, ctor, offset, size);
+ else
+ return fold_nonarray_ctor_reference (type, ctor, offset, size);
+ }
+
+ return NULL_TREE;
+}
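+
+/* For example, reading
+
+ static const float f = 1.0f;
+
+ through an int-typed access of the same size is not a useless type
+ conversion, but since TYPE_SIZE matches the value is view-converted,
+ folding to the integer 0x3f800000 on an IEEE target. */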
+
+/* Return the tree representing the element referenced by T if T is an
+ ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
+ names using VALUEIZE. Return NULL_TREE otherwise. */
+
+tree
+fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
+{
+ tree ctor, idx, base;
+ HOST_WIDE_INT offset, size, max_size;
+ tree tem;
+
+ if (TREE_THIS_VOLATILE (t))
+ return NULL_TREE;
+
+ if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
+ return get_symbol_constant_value (t);
+
+ tem = fold_read_from_constant_string (t);
+ if (tem)
+ return tem;
+
+ switch (TREE_CODE (t))
+ {
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ /* Constant indexes are handled well by get_base_constructor, so
+ only special-case variable offsets.
+ FIXME: This code can't handle nested references with variable indexes
+ (they will be handled only by iteration of ccp). Perhaps we can bring
+ get_ref_base_and_extent here and make it use a valueize callback. */
+ if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
+ && valueize
+ && (idx = (*valueize) (TREE_OPERAND (t, 1)))
+ && host_integerp (idx, 0))
+ {
+ tree low_bound, unit_size;
+
+ /* If the resulting bit-offset is constant, track it. */
+ if ((low_bound = array_ref_low_bound (t),
+ host_integerp (low_bound, 0))
+ && (unit_size = array_ref_element_size (t),
+ host_integerp (unit_size, 1)))
+ {
+ offset = TREE_INT_CST_LOW (idx);
+ offset -= TREE_INT_CST_LOW (low_bound);
+ offset *= TREE_INT_CST_LOW (unit_size);
+ offset *= BITS_PER_UNIT;
+
+ base = TREE_OPERAND (t, 0);
+ ctor = get_base_constructor (base, &offset, valueize);
+ /* Empty constructor. Always fold to 0. */
+ if (ctor == error_mark_node)
+ return build_zero_cst (TREE_TYPE (t));
+ /* Out-of-bounds array access. The value is undefined,
+ but don't fold. */
+ if (offset < 0)
+ return NULL_TREE;
+ /* We cannot determine the ctor. */
+ if (!ctor)
+ return NULL_TREE;
+ return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
+ TREE_INT_CST_LOW (unit_size)
+ * BITS_PER_UNIT);
+ }
+ }
+ /* Fallthru. */
+
+ case COMPONENT_REF:
+ case BIT_FIELD_REF:
+ case TARGET_MEM_REF:
+ case MEM_REF:
+ base = get_ref_base_and_extent (t, &offset, &size, &max_size);
+ ctor = get_base_constructor (base, &offset, valueize);
+
+ /* Empty constructor. Always fold to 0. */
+ if (ctor == error_mark_node)
+ return build_zero_cst (TREE_TYPE (t));
+ /* We do not know the precise address. */
+ if (max_size == -1 || max_size != size)
+ return NULL_TREE;
+ /* We cannot determine the ctor. */
+ if (!ctor)
+ return NULL_TREE;
+
+ /* Out-of-bounds array access. The value is undefined, but don't fold. */
+ if (offset < 0)
+ return NULL_TREE;
+
+ return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
+
+ case REALPART_EXPR:
+ case IMAGPART_EXPR:
+ {
+ tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
+ if (c && TREE_CODE (c) == COMPLEX_CST)
+ return fold_build1_loc (EXPR_LOCATION (t),
+ TREE_CODE (t), TREE_TYPE (t), c);
+ break;
+ }
+
+ default:
+ break;
+ }
+
+ return NULL_TREE;
+}
+
+tree
+fold_const_aggregate_ref (tree t)
+{
+ return fold_const_aggregate_ref_1 (t, NULL);
+}
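+
+/* A pass with no SSA value lattice of its own can simply do
+
+ tree val = fold_const_aggregate_ref (expr);
+ if (val)
+ ... replace EXPR with VAL ...
+
+ while a propagation pass such as CCP passes its lattice lookup as the
+ VALUEIZE callback of fold_const_aggregate_ref_1 so that SSA-name
+ indexes are resolved through the lattice. */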
+
+/* Return the declaration of the function that an OBJ_TYPE_REF references.
+ TOKEN is the integer form of OBJ_TYPE_REF_TOKEN of the reference
+ expression. KNOWN_BINFO carries the binfo describing the true type of
+ OBJ_TYPE_REF_OBJECT(REF). */
+
+tree
+gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
+{
+ unsigned HOST_WIDE_INT offset, size;
+ tree v, fn;
+
+ v = BINFO_VTABLE (known_binfo);
+ /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
+ if (!v)
+ return NULL_TREE;
+
+ if (TREE_CODE (v) == POINTER_PLUS_EXPR)
+ {
+ offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
+ v = TREE_OPERAND (v, 0);
+ }
+ else
+ offset = 0;
+
+ if (TREE_CODE (v) != ADDR_EXPR)
+ return NULL_TREE;
+ v = TREE_OPERAND (v, 0);
+
+ if (TREE_CODE (v) != VAR_DECL
+ || !DECL_VIRTUAL_P (v)
+ || !DECL_INITIAL (v)
+ || DECL_INITIAL (v) == error_mark_node)
+ return NULL_TREE;
+ gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
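+ /* Each vtable slot has the size of one element of the vtable array
+ type; the method for TOKEN lives TOKEN slots past bit OFFSET. */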
+ size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
+ offset += token * size;
+ fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
+ offset, size);
+ if (!fn)
+ return NULL_TREE;
+ gcc_assert (TREE_CODE (fn) == ADDR_EXPR
+ || TREE_CODE (fn) == FDESC_EXPR);
+ fn = TREE_OPERAND (fn, 0);
+ gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
+
+ /* When the cgraph node is missing and the function is not public, we
+ cannot devirtualize. This can happen in WHOPR when the actual method
+ ends up in another partition because we found the devirtualization
+ possibility too late. */
+ if (!can_refer_decl_in_current_unit_p (fn))
+ return NULL_TREE;
+
+ return fn;
+}
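+
+/* For example, for
+
+ struct A { virtual int f (); virtual int g (); };
+
+ a devirtualized call to G has TOKEN 1, so under the usual Itanium C++
+ ABI layout the entry one slot past the vtable address point is looked
+ up in the vtable's DECL_INITIAL, yielding the ADDR_EXPR of A::g. */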
+
+/* Return true iff VAL is a gimple expression that is known to be
+ non-negative. Restricted to floating-point inputs. */
+
+bool
+gimple_val_nonnegative_real_p (tree val)
+{
+ gimple def_stmt;
+
+ gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
+
+ /* Use existing logic for non-gimple trees. */
+ if (tree_expr_nonnegative_p (val))
+ return true;
+
+ if (TREE_CODE (val) != SSA_NAME)
+ return false;
+
+ /* Currently we look only at the immediately defining statement
+ to make this determination, since recursion on defining
+ statements of operands can lead to quadratic behavior in the
+ worst case. This is expected to catch almost all occurrences
+ in practice. It would be possible to implement limited-depth
+ recursion if important cases are lost. Alternatively, passes
+ that need this information (such as the pow/powi lowering code
+ in the cse_sincos pass) could be revised to provide it through
+ dataflow propagation. */
+
+ def_stmt = SSA_NAME_DEF_STMT (val);
+
+ if (is_gimple_assign (def_stmt))
+ {
+ tree op0, op1;
+
+ /* See fold-const.c:tree_expr_nonnegative_p for additional
+ cases that could be handled with recursion. */
+
+ switch (gimple_assign_rhs_code (def_stmt))
+ {
+ case ABS_EXPR:
+ /* Always true for floating-point operands. */
+ return true;
+
+ case MULT_EXPR:
+ /* True if the two operands are identical (since we are
+ restricted to floating-point inputs). */
+ op0 = gimple_assign_rhs1 (def_stmt);
+ op1 = gimple_assign_rhs2 (def_stmt);
+
+ if (op0 == op1
+ || operand_equal_p (op0, op1, 0))
+ return true;
+
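+ /* Fallthru: return false if the operands were not identical. */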
+ default:
+ return false;
+ }
+ }
+ else if (is_gimple_call (def_stmt))
+ {
+ tree fndecl = gimple_call_fndecl (def_stmt);
+ if (fndecl
+ && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
+ {
+ tree arg1;
+
+ switch (DECL_FUNCTION_CODE (fndecl))
+ {
+ CASE_FLT_FN (BUILT_IN_ACOS):
+ CASE_FLT_FN (BUILT_IN_ACOSH):
+ CASE_FLT_FN (BUILT_IN_CABS):
+ CASE_FLT_FN (BUILT_IN_COSH):
+ CASE_FLT_FN (BUILT_IN_ERFC):
+ CASE_FLT_FN (BUILT_IN_EXP):
+ CASE_FLT_FN (BUILT_IN_EXP10):
+ CASE_FLT_FN (BUILT_IN_EXP2):
+ CASE_FLT_FN (BUILT_IN_FABS):
+ CASE_FLT_FN (BUILT_IN_FDIM):
+ CASE_FLT_FN (BUILT_IN_HYPOT):
+ CASE_FLT_FN (BUILT_IN_POW10):
+ return true;
+
+ CASE_FLT_FN (BUILT_IN_SQRT):
+ /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
+ negative inputs. */
+ if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
+ return true;
+
+ break;
+
+ CASE_FLT_FN (BUILT_IN_POWI):
+ /* True if the second argument is an even integer. */
+ arg1 = gimple_call_arg (def_stmt, 1);
+
+ if (TREE_CODE (arg1) == INTEGER_CST
+ && (TREE_INT_CST_LOW (arg1) & 1) == 0)
+ return true;
+
+ break;
+
+ CASE_FLT_FN (BUILT_IN_POW):
+ /* True if the second argument is an even integer-valued
+ real. */
+ arg1 = gimple_call_arg (def_stmt, 1);
+
+ if (TREE_CODE (arg1) == REAL_CST)
+ {
+ REAL_VALUE_TYPE c;
+ HOST_WIDE_INT n;
+
+ c = TREE_REAL_CST (arg1);
+ n = real_to_integer (&c);
+
+ if ((n & 1) == 0)
+ {
+ REAL_VALUE_TYPE cint;
+ real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
+ if (real_identical (&c, &cint))
+ return true;
+ }
+ }
+
+ break;
+
+ default:
+ return false;
+ }
+ }
+ }
+
+ return false;
+}
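+
+/* For example, given the GIMPLE statement
+
+ x_2 = x_1 * x_1;
+
+ gimple_val_nonnegative_real_p (x_2) returns true because the MULT_EXPR
+ operands are identical; likewise for x_3 = __builtin_pow (y_1, 2.0e0),
+ whose second argument is an even integer-valued real. */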