/* Tree inlining.
- Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
- Free Software Foundation, Inc.
+ Copyright 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011,
+ 2012 Free Software Foundation, Inc.
Contributed by Alexandre Oliva <aoliva@redhat.com>
This file is part of GCC.
|| decl_function_context (*tp) == id->src_fn))
/* These may need to be remapped for EH handling. */
*tp = remap_decl (*tp, id);
+ else if (TREE_CODE (*tp) == FIELD_DECL)
+ {
+ /* If the enclosing record type is variably_modified_type_p, the field
+ has already been remapped. Otherwise, it need not be. */
+ tree *n = (tree *) pointer_map_contains (id->decl_map, *tp);
+ if (n)
+ *tp = *n;
+ *walk_subtrees = 0;
+ }
else if (TYPE_P (*tp))
/* Types may need remapping as well. */
*tp = remap_type (*tp, id);
ptr, TREE_OPERAND (*tp, 1));
TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
+ TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
*walk_subtrees = 0;
return NULL;
= gimple_build_omp_critical (s1, gimple_omp_critical_name (stmt));
break;
+ case GIMPLE_TRANSACTION:
+ s1 = remap_gimple_seq (gimple_transaction_body (stmt), id);
+ copy = gimple_build_transaction (s1, gimple_transaction_label (stmt));
+ gimple_transaction_set_subcode (copy, gimple_transaction_subcode (stmt));
+ break;
+
default:
gcc_unreachable ();
}
&& value != error_mark_node
&& !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
{
+ /* If we can match up types by promotion/demotion do so. */
if (fold_convertible_p (TREE_TYPE (p), value))
- rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
+ rhs = fold_convert (TREE_TYPE (p), value);
else
- /* ??? For valid (GIMPLE) programs we should not end up here.
- Still if something has gone wrong and we end up with truly
- mismatched types here, fall back to using a VIEW_CONVERT_EXPR
- to not leak invalid GIMPLE to the following passes. */
- rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
+ {
+ /* ??? For valid programs we should not end up here.
+ Still if we end up with truly mismatched types here, fall back
+ to using a VIEW_CONVERT_EXPR or a literal zero to not leak invalid
+ GIMPLE to the following passes. */
+ if (!is_gimple_reg_type (TREE_TYPE (value))
+ || TYPE_SIZE (TREE_TYPE (p)) == TYPE_SIZE (TREE_TYPE (value)))
+ rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (p), value);
+ else
+ rhs = build_zero_cst (TREE_TYPE (p));
+ }
}
/* Make an equivalent VAR_DECL. Note that we must NOT remap the type
/* Make gimplifier happy about this variable. */
DECL_SEEN_IN_BIND_EXPR_P (var) = 1;
+ /* We are eventually using the value - make sure all variables
+ referenced therein are properly recorded. */
+ if (value
+ && gimple_in_ssa_p (cfun)
+ && TREE_CODE (value) == ADDR_EXPR)
+ {
+ tree base = get_base_address (TREE_OPERAND (value, 0));
+ if (base && TREE_CODE (base) == VAR_DECL)
+ add_referenced_var (base);
+ }
+
/* If the parameter is never assigned to, has no SSA_NAMEs created,
we would not need to create a new variable here at all, if it
weren't for debug info. Still, we can just use the argument
else
caller_type = TREE_TYPE (TREE_TYPE (callee));
- /* We don't need to do anything for functions that don't return
- anything. */
- if (!result || VOID_TYPE_P (callee_type))
+ /* We don't need to do anything for functions that don't return anything. */
+ if (VOID_TYPE_P (callee_type))
return NULL_TREE;
/* If there was a return slot, then the return value is the
promoted, convert it back to the expected type. */
use = var;
if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
- use = fold_convert (caller_type, var);
+ {
+ /* If we can match up types by promotion/demotion do so. */
+ if (fold_convertible_p (caller_type, var))
+ use = fold_convert (caller_type, var);
+ else
+ {
+ /* ??? For valid programs we should not end up here.
+ Still if we end up with truly mismatched types here, fall back
+ to using a MEM_REF to not leak invalid GIMPLE to the following
+ passes. */
+ /* Prevent var from being written into SSA form. */
+ if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE
+ || TREE_CODE (TREE_TYPE (var)) == COMPLEX_TYPE)
+ DECL_GIMPLE_REG_P (var) = false;
+ else if (is_gimple_reg_type (TREE_TYPE (var)))
+ TREE_ADDRESSABLE (var) = true;
+ use = fold_build2 (MEM_REF, caller_type,
+ build_fold_addr_expr (var),
+ build_int_cst (ptr_type_node, 0));
+ }
+ }
STRIP_USELESS_TYPE_CONVERSION (use);
if (gimple_in_ssa_p (id->src_cfun))
add_referenced_var (temp);
insert_decl_map (id, result, temp);
- /* When RESULT_DECL is in SSA form, we need to use it's default_def
- SSA_NAME. */
- if (gimple_in_ssa_p (id->src_cfun) && gimple_default_def (id->src_cfun, result))
- temp = remap_ssa_name (gimple_default_def (id->src_cfun, result), id);
+ /* When RESULT_DECL is in SSA form, we need to remap and initialize
+ its default_def SSA_NAME. */
+ if (gimple_in_ssa_p (id->src_cfun)
+ && is_gimple_reg (result))
+ {
+ temp = make_ssa_name (temp, NULL);
+ insert_decl_map (id, gimple_default_def (id->src_cfun, result),
+ temp);
+ }
insert_init_stmt (id, entry_bb, gimple_build_assign (temp, var));
}
else
??? We may consider mapping RTL costs to this. */
case COND_EXPR:
case VEC_COND_EXPR:
+ case VEC_PERM_EXPR:
case PLUS_EXPR:
case POINTER_PLUS_EXPR:
case DOT_PROD_EXPR:
case WIDEN_MULT_PLUS_EXPR:
case WIDEN_MULT_MINUS_EXPR:
+ case WIDEN_LSHIFT_EXPR:
case VEC_WIDEN_MULT_HI_EXPR:
case VEC_WIDEN_MULT_LO_EXPR:
case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_SAT_EXPR:
case VEC_PACK_FIX_TRUNC_EXPR:
- case VEC_EXTRACT_EVEN_EXPR:
- case VEC_EXTRACT_ODD_EXPR:
- case VEC_INTERLEAVE_HIGH_EXPR:
- case VEC_INTERLEAVE_LOW_EXPR:
+ case VEC_WIDEN_LSHIFT_HI_EXPR:
+ case VEC_WIDEN_LSHIFT_LO_EXPR:
return 1;
likely be a real store, so the cost of the GIMPLE_ASSIGN is the cost
of moving something into "a", which we compute using the function
estimate_move_cost. */
+ if (gimple_clobber_p (stmt))
+ return 0; /* ={v} {CLOBBER} stmt expands to nothing. */
+
lhs = gimple_assign_lhs (stmt);
rhs = gimple_assign_rhs1 (stmt);
case GIMPLE_CALL:
{
tree decl = gimple_call_fndecl (stmt);
- struct cgraph_node *node;
+ struct cgraph_node *node = NULL;
/* Do not special case builtins where we see the body.
This just confuse inliner. */
}
}
- cost = weights->call_cost;
+ cost = node ? weights->call_cost : weights->indirect_call_cost;
if (gimple_call_lhs (stmt))
cost += estimate_move_cost (TREE_TYPE (gimple_call_lhs (stmt)));
for (i = 0; i < gimple_call_num_args (stmt); i++)
return (weights->omp_cost
+ estimate_num_insns_seq (gimple_omp_body (stmt), weights));
+ case GIMPLE_TRANSACTION:
+ return (weights->tm_cost
+ + estimate_num_insns_seq (gimple_transaction_body (stmt),
+ weights));
+
default:
gcc_unreachable ();
}
init_inline_once (void)
{
eni_size_weights.call_cost = 1;
+ eni_size_weights.indirect_call_cost = 3;
eni_size_weights.target_builtin_call_cost = 1;
eni_size_weights.div_mod_cost = 1;
eni_size_weights.omp_cost = 40;
+ eni_size_weights.tm_cost = 10;
eni_size_weights.time_based = false;
eni_size_weights.return_cost = 1;
underestimating the cost does less harm than overestimating it, so
we choose a rather small value here. */
eni_time_weights.call_cost = 10;
+ eni_time_weights.indirect_call_cost = 15;
eni_time_weights.target_builtin_call_cost = 1;
eni_time_weights.div_mod_cost = 10;
eni_time_weights.omp_cost = 40;
+ eni_time_weights.tm_cost = 40;
eni_time_weights.time_based = true;
eni_time_weights.return_cost = 2;
}
goto egress;
if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn))
+ /* For extern inline functions that get redefined we have always
+ silently ignored the always_inline flag. Better behaviour would
+ be to keep both bodies and use the extern inline body
+ for inlining, but we can't do that because frontends overwrite
+ the body. */
+ && !cg_edge->callee->local.redefined_extern_inline
/* Avoid warnings during early inline pass. */
&& cgraph_global_info_ready
/* PR 20090218-1_0.c. Body can be provided by another module. */
/* Expand call statements reachable from STMT_P.
We can only have CALL_EXPRs as the "toplevel" tree code or nested
- in a MODIFY_EXPR. See gimple.c:get_call_expr_in(). We can
- unfortunately not use that function here because we need a pointer
- to the CALL_EXPR, not the tree itself. */
+ in a MODIFY_EXPR. */
static bool
gimple_expand_calls_inline (basic_block bb, copy_body_data *id)
struct gimplify_ctx gctx;
bool inlined_p = false;
- /* There is no point in performing inlining if errors have already
- occurred -- and we might crash if we try to inline invalid
- code. */
- if (seen_error ())
- return 0;
-
/* Clear out ID. */
memset (&id, 0, sizeof (id));
if (!args_to_skip || !bitmap_bit_p (args_to_skip, i))
{
tree new_tree = remap_decl (arg, id);
+ if (TREE_CODE (new_tree) != PARM_DECL)
+ new_tree = id->copy_decl (arg, id);
lang_hooks.dup_lang_specific_decl (new_tree);
*parg = new_tree;
parg = &DECL_CHAIN (new_tree);
If non-NULL ARGS_TO_SKIP determine function parameters to remove
from new version.
+ If SKIP_RETURN is true, the new version will return void.
If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
If non_NULL NEW_ENTRY determine new entry BB of the clone.
*/
tree_function_versioning (tree old_decl, tree new_decl,
VEC(ipa_replace_map_p,gc)* tree_map,
bool update_clones, bitmap args_to_skip,
- bitmap blocks_to_copy, basic_block new_entry)
+ bool skip_return, bitmap blocks_to_copy,
+ basic_block new_entry)
{
struct cgraph_node *old_version_node;
struct cgraph_node *new_version_node;
/* Add local vars. */
add_local_variables (DECL_STRUCT_FUNCTION (old_decl), cfun, &id, false);
- if (DECL_RESULT (old_decl) != NULL_TREE)
+ if (DECL_RESULT (old_decl) == NULL_TREE)
+ ;
+ else if (skip_return && !VOID_TYPE_P (TREE_TYPE (DECL_RESULT (old_decl))))
+ {
+ DECL_RESULT (new_decl)
+ = build_decl (DECL_SOURCE_LOCATION (DECL_RESULT (old_decl)),
+ RESULT_DECL, NULL_TREE, void_type_node);
+ DECL_CONTEXT (DECL_RESULT (new_decl)) = new_decl;
+ cfun->returns_struct = 0;
+ cfun->returns_pcc_struct = 0;
+ }
+ else
{
tree old_name;
DECL_RESULT (new_decl) = remap_decl (DECL_RESULT (old_decl), &id);