}
-/* The regular is_gimple_min_invariant does a shallow test of the object.
- It assumes that full gimplification has happened, or will happen on the
- object. For a value coming from DECL_INITIAL, this is not true, so we
- have to be more strict ourselves. */
-
-static bool
-ccp_decl_initial_min_invariant (tree t)
-{
- if (!is_gimple_min_invariant (t))
- return false;
- if (TREE_CODE (t) == ADDR_EXPR)
- {
- /* Inline and unroll is_gimple_addressable. */
- while (1)
- {
- t = TREE_OPERAND (t, 0);
- if (is_gimple_id (t))
- return true;
- if (!handled_component_p (t))
- return false;
- }
- }
- return true;
-}
-
/* If SYM is a constant variable with known value, return the value.
NULL_TREE is returned otherwise. */
-static tree
+tree
get_symbol_constant_value (tree sym)
{
if (TREE_STATIC (sym)
{
tree val = DECL_INITIAL (sym);
if (val
- && ccp_decl_initial_min_invariant (val))
+ && is_gimple_min_invariant (val))
return val;
+ /* Variables declared 'const' without an initializer
+ have zero as the initializer if they may not be
+ overridden at link or run time. */
+ if (!val
+ && targetm.binds_local_p (sym)
+ && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
+ || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
+ return fold_convert (TREE_TYPE (sym), integer_zero_node);
}
return NULL_TREE;
static inline prop_value_t *
get_value (tree var)
{
- prop_value_t *val = &const_val[SSA_NAME_VERSION (var)];
+ prop_value_t *val;
+ if (const_val == NULL)
+ return NULL;
+
+ val = &const_val[SSA_NAME_VERSION (var)];
if (val->lattice_val == UNINITIALIZED)
*val = get_default_value (var);
bool something_changed = substitute_and_fold (const_val, false);
free (const_val);
+ const_val = NULL;
return something_changed;;
}
op0 = get_value (op0)->value;
}
+ /* Conversions are useless for CCP purposes if they are
+ value-preserving. Thus the restrictions that
+ useless_type_conversion_p places for pointer type conversions do
+ not apply here. Substitution later will only substitute to
+ allowed places. */
if ((code == NOP_EXPR || code == CONVERT_EXPR)
- && useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0)))
+ && ((POINTER_TYPE_P (TREE_TYPE (rhs))
+ && POINTER_TYPE_P (TREE_TYPE (op0)))
+ || useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (op0))))
return op0;
return fold_unary (code, TREE_TYPE (rhs), op0);
}
return fold_binary (code, TREE_TYPE (rhs), op0, op1);
}
+ else if (kind == tcc_declaration)
+ return get_symbol_constant_value (rhs);
+
+ else if (kind == tcc_reference)
+ return fold_const_aggregate_ref (rhs);
+
/* We may be able to fold away calls to builtin functions if their
arguments are constants. */
else if (code == CALL_EXPR
ARRAY_REF or COMPONENT_REF into constant aggregates. Return
NULL_TREE otherwise. */
-static tree
+tree
fold_const_aggregate_ref (tree t)
{
prop_value_t *value;
ctor = fold_const_aggregate_ref (base);
break;
+ case STRING_CST:
+ case CONSTRUCTOR:
+ ctor = base;
+ break;
+
default:
return NULL_TREE;
}
return fold_build1 (TREE_CODE (t), TREE_TYPE (t), c);
break;
}
-
+
+ case INDIRECT_REF:
+ {
+ tree base = TREE_OPERAND (t, 0);
+ if (TREE_CODE (base) == SSA_NAME
+ && (value = get_value (base))
+ && value->lattice_val == CONSTANT
+ && TREE_CODE (value->value) == ADDR_EXPR)
+ return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
+ break;
+ }
+
default:
break;
}
simplified = ccp_fold (stmt);
/* If the statement is likely to have a VARYING result, then do not
bother folding the statement. */
- if (likelyvalue == VARYING)
+ else if (likelyvalue == VARYING)
simplified = get_rhs (stmt);
- /* If the statement is an ARRAY_REF or COMPONENT_REF into constant
- aggregates, extract the referenced constant. Otherwise the
- statement is likely to have an UNDEFINED value, and there will be
- nothing to do. Note that fold_const_aggregate_ref returns
- NULL_TREE if the first case does not match. */
- else if (!simplified)
- simplified = fold_const_aggregate_ref (get_rhs (stmt));
is_constant = simplified && is_gimple_min_invariant (simplified);
}
else
/* Evaluate the statement. */
- val = evaluate_stmt (stmt);
+ val = evaluate_stmt (stmt);
/* If the original LHS was a VIEW_CONVERT_EXPR, modify the constant
value to be a VIEW_CONVERT_EXPR of the old constant value.
}
-struct tree_opt_pass pass_ccp =
+struct gimple_opt_pass pass_ccp =
{
+ {
+ GIMPLE_PASS,
"ccp", /* name */
gate_ccp, /* gate */
do_ssa_ccp, /* execute */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_verify_ssa
- | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
- 0 /* letter */
+ | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
+ }
};
}
-struct tree_opt_pass pass_store_ccp =
+struct gimple_opt_pass pass_store_ccp =
{
+ {
+ GIMPLE_PASS,
"store_ccp", /* name */
gate_store_ccp, /* gate */
do_ssa_store_ccp, /* execute */
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_verify_ssa
- | TODO_verify_stmts | TODO_ggc_collect,/* todo_flags_finish */
- 0 /* letter */
+ | TODO_verify_stmts | TODO_ggc_collect/* todo_flags_finish */
+ }
};
/* Given a constant value VAL for bitfield FIELD, and a destination
is the desired result type. */
static tree
-maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type)
+maybe_fold_offset_to_array_ref (tree base, tree offset, tree orig_type,
+ bool allow_negative_idx)
{
tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
tree array_type, elt_type, elt_size;
+ tree domain_type;
/* If BASE is an ARRAY_REF, we can pick up another offset (this time
measured in units of the size of elements type) from that ARRAY_REF).
low bound, if any, convert the index into that type, and add the
low bound. */
min_idx = build_int_cst (idx_type, 0);
- if (TYPE_DOMAIN (array_type))
+ domain_type = TYPE_DOMAIN (array_type);
+ if (domain_type)
{
- idx_type = TYPE_DOMAIN (array_type);
+ idx_type = domain_type;
if (TYPE_MIN_VALUE (idx_type))
min_idx = TYPE_MIN_VALUE (idx_type);
else
/* Make sure to possibly truncate late after offsetting. */
idx = fold_convert (idx_type, idx);
+ /* We don't want to construct access past array bounds. For example
+ char *(c[4]);
+ c[3][2];
+ should not be simplified into (*c)[14] or tree-vrp will
+ give false warnings. The same is true for
+ struct A { long x; char d[0]; } *a;
+ (char *)a - 4;
+ which should not be folded to &a->d[-8]. */
+ if (domain_type
+ && TYPE_MAX_VALUE (domain_type)
+ && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
+ {
+ tree up_bound = TYPE_MAX_VALUE (domain_type);
+
+ if (tree_int_cst_lt (up_bound, idx)
+ /* Accesses after the end of arrays of size 0 (gcc
+ extension) and 1 are likely intentional ("struct
+ hack"). */
+ && compare_tree_int (up_bound, 1) > 0)
+ return NULL_TREE;
+ }
+ if (domain_type
+ && TYPE_MIN_VALUE (domain_type))
+ {
+ if (!allow_negative_idx
+ && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
+ && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
+ return NULL_TREE;
+ }
+ else if (!allow_negative_idx
+ && compare_tree_int (idx, 0) < 0)
+ return NULL_TREE;
+
return build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
}
new_base = build3 (COMPONENT_REF, field_type, new_base, f, NULL_TREE);
/* Recurse to possibly find the match. */
- ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type);
+ ret = maybe_fold_offset_to_array_ref (new_base, t, orig_type,
+ f == TYPE_FIELDS (record_type));
if (ret)
return ret;
ret = maybe_fold_offset_to_component_ref (field_type, new_base, t,
base = build1 (INDIRECT_REF, record_type, base);
base = build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
- t = maybe_fold_offset_to_array_ref (base, offset, orig_type);
+ t = maybe_fold_offset_to_array_ref (base, offset, orig_type,
+ f == TYPE_FIELDS (record_type));
if (t)
return t;
return maybe_fold_offset_to_component_ref (field_type, base, offset,
{
if (base_is_ptr)
base = build1 (INDIRECT_REF, type, base);
- ret = maybe_fold_offset_to_array_ref (base, offset, orig_type);
+ ret = maybe_fold_offset_to_array_ref (base, offset, orig_type, true);
}
return ret;
}
/* Fold away CONST_DECL to its value, if the type is scalar. */
if (TREE_CODE (base) == CONST_DECL
- && ccp_decl_initial_min_invariant (DECL_INITIAL (base)))
+ && is_gimple_min_invariant (DECL_INITIAL (base)))
return DECL_INITIAL (base);
/* Try folding *(&B+O) to B.X. */
}
ptd_type = TREE_TYPE (ptr_type);
+ /* If we want a pointer to void, reconstruct the reference from the
+ array element type. A pointer to that can be trivially converted
+ to void *. This happens as we fold (void *)(ptr p+ off). */
+ if (VOID_TYPE_P (ptd_type)
+ && TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
+ ptd_type = TREE_TYPE (TREE_TYPE (op0));
/* At which point we can try some of the same things as for indirects. */
- t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type);
+ t = maybe_fold_offset_to_array_ref (op0, op1, ptd_type, true);
if (!t)
t = maybe_fold_offset_to_component_ref (TREE_TYPE (op0), op0, op1,
ptd_type, false);
t = maybe_fold_stmt_indirect (expr, TREE_OPERAND (expr, 0),
integer_zero_node);
+ if (!t
+ && TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
+ /* If we had a good reason for propagating the address here,
+ make sure we end up with valid gimple. See PR34989. */
+ t = TREE_OPERAND (TREE_OPERAND (expr, 0), 0);
break;
case NOP_EXPR:
if (TREE_CODE (arg) == COND_EXPR)
return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
&& get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
+ /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
+ else if (TREE_CODE (arg) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
+ && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
+ {
+ tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ if (TREE_CODE (aop0) == INDIRECT_REF
+ && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
+ return get_maxval_strlen (TREE_OPERAND (aop0, 0),
+ length, visited, type);
+ }
if (type == 2)
{
return integer_zero_node;
}
\f
+/* If va_list type is a simple pointer and nothing special is needed,
+ optimize __builtin_va_start (&ap, 0) into ap = __builtin_next_arg (0),
+ __builtin_va_end (&ap) out as NOP and __builtin_va_copy into a simple
+ pointer assignment.
+
+ CALL is the CALL_EXPR to inspect. Returns the replacement expression,
+ or NULL_TREE when no optimization applies. */
+
+static tree
+optimize_stdarg_builtin (tree call)
+{
+ tree callee, lhs, rhs;
+ bool va_list_simple_ptr;
+
+ if (TREE_CODE (call) != CALL_EXPR)
+ return NULL_TREE;
+
+ /* The whole optimization is only valid when the target's va_list is
+ nothing more than a plain char * or void * pointer. */
+ va_list_simple_ptr = POINTER_TYPE_P (va_list_type_node)
+ && (TREE_TYPE (va_list_type_node) == void_type_node
+ || TREE_TYPE (va_list_type_node) == char_type_node);
+
+ callee = get_callee_fndecl (call);
+ /* NOTE(review): callee is assumed non-NULL and a builtin here — the
+ caller is expected to dispatch only direct calls to the three
+ stdarg builtins (see the default: case below); confirm at the
+ call site. */
+ switch (DECL_FUNCTION_CODE (callee))
+ {
+ case BUILT_IN_VA_START:
+ /* Bail out if the target expands va_start specially or does not
+ provide __builtin_next_arg to expand to. */
+ if (!va_list_simple_ptr
+ || targetm.expand_builtin_va_start != NULL
+ || built_in_decls[BUILT_IN_NEXT_ARG] == NULL)
+ return NULL_TREE;
+
+ if (call_expr_nargs (call) != 2)
+ return NULL_TREE;
+
+ /* The first argument must be a pointer to the va_list itself,
+ i.e. the &ap the user wrote. */
+ lhs = CALL_EXPR_ARG (call, 0);
+ if (!POINTER_TYPE_P (TREE_TYPE (lhs))
+ || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ /* Strip the address-of so the assignment targets the va_list
+ lvalue, then build ap = __builtin_next_arg (0). */
+ lhs = build_fold_indirect_ref (lhs);
+ rhs = build_call_expr (built_in_decls[BUILT_IN_NEXT_ARG],
+ 1, integer_zero_node);
+ rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
+
+ case BUILT_IN_VA_COPY:
+ if (!va_list_simple_ptr)
+ return NULL_TREE;
+
+ if (call_expr_nargs (call) != 2)
+ return NULL_TREE;
+
+ /* The destination is passed by address, exactly as for va_start
+ above. */
+ lhs = CALL_EXPR_ARG (call, 0);
+ if (!POINTER_TYPE_P (TREE_TYPE (lhs))
+ || TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (lhs)))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ /* The source is passed by value and must itself be a va_list. */
+ lhs = build_fold_indirect_ref (lhs);
+ rhs = CALL_EXPR_ARG (call, 1);
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (rhs))
+ != TYPE_MAIN_VARIANT (va_list_type_node))
+ return NULL_TREE;
+
+ rhs = fold_convert (TREE_TYPE (lhs), rhs);
+ return build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, rhs);
+
+ case BUILT_IN_VA_END:
+ /* va_end has no effect for a simple-pointer va_list; returning
+ integer_zero_node lets the caller fold the call away (see the
+ function comment above). */
+ return integer_zero_node;
+
+ default:
+ /* Only the three stdarg builtins may be dispatched here. */
+ gcc_unreachable ();
+ }
+}
+\f
/* Convert EXPR into a GIMPLE value suitable for substitution on the
RHS of an assignment. Insert the necessary statements before
iterator *SI_P.
result = optimize_stack_restore (bb, *stmtp, i);
if (result)
break;
+ bsi_next (&i);
+ continue;
+
+ case BUILT_IN_VA_START:
+ case BUILT_IN_VA_END:
+ case BUILT_IN_VA_COPY:
+ /* These shouldn't be folded before pass_stdarg. */
+ result = optimize_stdarg_builtin (*stmtp);
+ if (result)
+ break;
/* FALLTHRU */
default:
}
-struct tree_opt_pass pass_fold_builtins =
+struct gimple_opt_pass pass_fold_builtins =
{
+ {
+ GIMPLE_PASS,
"fab", /* name */
NULL, /* gate */
execute_fold_all_builtins, /* execute */
0, /* todo_flags_start */
TODO_dump_func
| TODO_verify_ssa
- | TODO_update_ssa, /* todo_flags_finish */
- 0 /* letter */
+ | TODO_update_ssa /* todo_flags_finish */
+ }
};