GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 51 Franklin Street, Fifth Floor,
-Boston, MA 02110-1301, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "ipa-prop.h"
#include "value-prof.h"
#include "tree-pass.h"
+#include "target.h"
+#include "integrate.h"
/* I'm not really happy about this, but we need to handle GIMPLE and
non-GIMPLE trees. */
{
case INTEGER_TYPE:
case REAL_TYPE:
+ case FIXED_POINT_TYPE:
case ENUMERAL_TYPE:
case BOOLEAN_TYPE:
t = TYPE_MIN_VALUE (new);
/* Local static declarations cannot be duplicated or remapped (that would
break the one-declaration rule), so instead link the original decls
directly into the current function's unexpanded_var_list. */
- if (!lang_hooks.tree_inlining.auto_var_in_fn_p (old_var, id->src_fn)
+ if (!auto_var_in_fn_p (old_var, id->src_fn)
&& !DECL_EXTERNAL (old_var))
{
cfun->unexpanded_var_list = tree_cons (NULL_TREE, old_var,
variables. We don't want to copy static variables; there's only
one of those, no matter how many times we inline the containing
function. Similarly for globals from an outer function. */
- else if (lang_hooks.tree_inlining.auto_var_in_fn_p (*tp, fn))
+ else if (auto_var_in_fn_p (*tp, fn))
{
tree new_decl;
discarding. */
if (TREE_CODE (*tp) == GIMPLE_MODIFY_STMT
&& GIMPLE_STMT_OPERAND (*tp, 0) == GIMPLE_STMT_OPERAND (*tp, 1)
- && (lang_hooks.tree_inlining.auto_var_in_fn_p
- (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
+ && (auto_var_in_fn_p (GIMPLE_STMT_OPERAND (*tp, 0), fn)))
{
/* Some assignments VAR = VAR; don't generate any rtl code
and thus don't count as variable modification. Avoid
(NULL_TREE,
id->eh_region_offset + TREE_INT_CST_LOW (TREE_OPERAND (*tp, 0)));
- if (!GIMPLE_TUPLE_P (*tp))
+ if (!GIMPLE_TUPLE_P (*tp) && TREE_CODE (*tp) != OMP_CLAUSE)
TREE_TYPE (*tp) = remap_type (TREE_TYPE (*tp), id);
/* The copied TARGET_EXPR has never been expanded, even if the
into multiple statements, we need to process all of them. */
while (!bsi_end_p (copy_bsi))
{
- stmt = bsi_stmt (copy_bsi);
+ tree *stmtp = bsi_stmt_ptr (copy_bsi);
+ tree stmt = *stmtp;
call = get_call_expr_in (stmt);
+ if (call && CALL_EXPR_VA_ARG_PACK (call) && id->call_expr)
+ {
+ /* __builtin_va_arg_pack () should be replaced by
+ all arguments corresponding to ... in the caller. */
+ tree p, *argarray, new_call, *call_ptr;
+ int nargs = call_expr_nargs (id->call_expr);
+
+ for (p = DECL_ARGUMENTS (id->src_fn); p; p = TREE_CHAIN (p))
+ nargs--;
+
+ argarray = (tree *) alloca ((nargs + call_expr_nargs (call))
+ * sizeof (tree));
+
+ memcpy (argarray, CALL_EXPR_ARGP (call),
+ call_expr_nargs (call) * sizeof (*argarray));
+ memcpy (argarray + call_expr_nargs (call),
+ CALL_EXPR_ARGP (id->call_expr)
+ + (call_expr_nargs (id->call_expr) - nargs),
+ nargs * sizeof (*argarray));
+
+ new_call = build_call_array (TREE_TYPE (call),
+ CALL_EXPR_FN (call),
+ nargs + call_expr_nargs (call),
+ argarray);
+ /* Copy all CALL_EXPR flags, locus and block, except the
+ CALL_EXPR_VA_ARG_PACK flag. */
+ CALL_EXPR_STATIC_CHAIN (new_call)
+ = CALL_EXPR_STATIC_CHAIN (call);
+ CALL_EXPR_TAILCALL (new_call) = CALL_EXPR_TAILCALL (call);
+ CALL_EXPR_RETURN_SLOT_OPT (new_call)
+ = CALL_EXPR_RETURN_SLOT_OPT (call);
+ CALL_FROM_THUNK_P (new_call) = CALL_FROM_THUNK_P (call);
+ CALL_CANNOT_INLINE_P (new_call)
+ = CALL_CANNOT_INLINE_P (call);
+ TREE_NOTHROW (new_call) = TREE_NOTHROW (call);
+ SET_EXPR_LOCUS (new_call, EXPR_LOCUS (call));
+ TREE_BLOCK (new_call) = TREE_BLOCK (call);
+
+ call_ptr = stmtp;
+ if (TREE_CODE (*call_ptr) == GIMPLE_MODIFY_STMT)
+ call_ptr = &GIMPLE_STMT_OPERAND (*call_ptr, 1);
+ if (TREE_CODE (*call_ptr) == WITH_SIZE_EXPR)
+ call_ptr = &TREE_OPERAND (*call_ptr, 0);
+ gcc_assert (*call_ptr == call);
+ *call_ptr = new_call;
+ stmt = *stmtp;
+ update_stmt (stmt);
+ }
+
/* Statements produced by inlining can be unfolded, especially
when we constant propagated some operands. We can't fold
them right now for two reasons:
new_cfun->unexpanded_var_list = NULL;
new_cfun->cfg = NULL;
new_cfun->decl = new_fndecl /*= copy_node (callee_fndecl)*/;
- new_cfun->ib_boundaries_block = NULL;
DECL_STRUCT_FUNCTION (new_fndecl) = new_cfun;
push_cfun (new_cfun);
init_empty_tree_cfg ();
new->aux = bb;
}
- last = n_basic_blocks;
+ last = last_basic_block;
/* Now that we've duplicated the blocks, duplicate their edges. */
FOR_ALL_BB_FN (bb, cfun_to_copy)
copy_edges_for_bb (bb, count_scale);
}
/* Zero out AUX fields of newly created block during EH edge
insertion. */
- for (; last < n_basic_blocks; last++)
+ for (; last < last_basic_block; last++)
BASIC_BLOCK (last)->aux = NULL;
entry_block_map->aux = NULL;
exit_block_map->aux = NULL;
var = get_base_address (TREE_OPERAND (value, 0));
- return var && lang_hooks.tree_inlining.auto_var_in_fn_p (var, fn);
+ return var && auto_var_in_fn_p (var, fn);
}
static void
tree init_stmt;
tree var;
tree var_sub;
- tree rhs = value ? fold_convert (TREE_TYPE (p), value) : NULL;
+ tree rhs = value;
tree def = (gimple_in_ssa_p (cfun)
? gimple_default_def (id->src_cfun, p) : NULL);
+ if (value
+ && value != error_mark_node
+ && !useless_type_conversion_p (TREE_TYPE (p), TREE_TYPE (value)))
+ rhs = fold_build1 (NOP_EXPR, TREE_TYPE (p), value);
+
/* If the parameter is never assigned to, has no SSA_NAMEs created,
we may not need to create a new variable here at all. Instead, we may
be able to just use the argument value. */
It is not big deal to prohibit constant propagation here as
we will constant propagate in DOM1 pass anyway. */
if (is_gimple_min_invariant (value)
- && lang_hooks.types_compatible_p (TREE_TYPE (value), TREE_TYPE (p))
+ && useless_type_conversion_p (TREE_TYPE (p),
+ TREE_TYPE (value))
/* We have to be very careful about ADDR_EXPR. Make sure
the base variable isn't a local variable of the inlined
function, e.g., when doing recursive inlining, direct or
tree a;
tree p;
tree vars = NULL_TREE;
- int argnum = 0;
call_expr_arg_iterator iter;
tree static_chain = CALL_EXPR_STATIC_CHAIN (exp);
equivalent VAR_DECL, appropriately initialized. */
for (p = parms, a = first_call_expr_arg (exp, &iter); p;
a = next_call_expr_arg (&iter), p = TREE_CHAIN (p))
- {
- tree value;
-
- ++argnum;
-
- /* Find the initializer. */
- value = lang_hooks.tree_inlining.convert_parm_for_inlining
- (p, a, fn, argnum);
-
- setup_one_parameter (id, p, value, fn, bb, &vars);
- }
+ setup_one_parameter (id, p, a, fn, bb, &vars);
/* Initialize the static chain. */
p = DECL_STRUCT_FUNCTION (fn)->static_chain_decl;
bool use_it = false;
/* We can't use MODIFY_DEST if there's type promotion involved. */
- if (!lang_hooks.types_compatible_p (caller_type, callee_type))
+ if (!useless_type_conversion_p (callee_type, caller_type))
use_it = false;
/* ??? If we're assigning to a variable sized type, then we must
/* Build the use expr. If the return type of the function was
promoted, convert it back to the expected type. */
use = var;
- if (!lang_hooks.types_compatible_p (TREE_TYPE (var), caller_type))
+ if (!useless_type_conversion_p (caller_type, TREE_TYPE (var)))
use = fold_convert (caller_type, var);
STRIP_USELESS_TYPE_CONVERSION (use);
inlinable_function_p (tree fn)
{
bool inlinable = true;
+ bool do_warning;
+ tree always_inline;
/* If we've already decided this function shouldn't be inlined,
there's no need to check again. */
if (DECL_UNINLINABLE (fn))
return false;
- /* See if there is any language-specific reason it cannot be
- inlined. (It is important that this hook be called early because
- in C++ it may result in template instantiation.)
- If the function is not inlinable for language-specific reasons,
- it is left up to the langhook to explain why. */
- inlinable = !lang_hooks.tree_inlining.cannot_inline_tree_fn (&fn);
+ /* We only warn for functions declared `inline' by the user. */
+ do_warning = (warn_inline
+ && DECL_INLINE (fn)
+ && DECL_DECLARED_INLINE_P (fn)
+ && !DECL_IN_SYSTEM_HEADER (fn));
+
+ always_inline = lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn));
+
+ if (flag_really_no_inline
+ && always_inline == NULL)
+ {
+ if (do_warning)
+ warning (OPT_Winline, "function %q+F can never be inlined because it "
+ "is suppressed using -fno-inline", fn);
+ inlinable = false;
+ }
+
+ /* Don't auto-inline anything that might not be bound within
+ this unit of translation. */
+ else if (!DECL_DECLARED_INLINE_P (fn)
+ && DECL_REPLACEABLE_P (fn))
+ inlinable = false;
+
+ else if (!function_attribute_inlinable_p (fn))
+ {
+ if (do_warning)
+ warning (OPT_Winline, "function %q+F can never be inlined because it "
+ "uses attributes conflicting with inlining", fn);
+ inlinable = false;
+ }
/* If we don't have the function body available, we can't inline it.
However, this should not be recorded since we also get here for
about functions that would for example call alloca. But since
this a property of the function, just one warning is enough.
As a bonus we can now give more details about the reason why a
- function is not inlinable.
- We only warn for functions declared `inline' by the user. */
- bool do_warning = (warn_inline
- && DECL_INLINE (fn)
- && DECL_DECLARED_INLINE_P (fn)
- && !DECL_IN_SYSTEM_HEADER (fn));
-
- if (lookup_attribute ("always_inline", DECL_ATTRIBUTES (fn)))
+ function is not inlinable. */
+ if (always_inline)
sorry (inline_forbidden_reason, fn);
else if (do_warning)
warning (OPT_Winline, inline_forbidden_reason, fn);
case BIND_EXPR:
case WITH_CLEANUP_EXPR:
case NOP_EXPR:
+ case CONVERT_EXPR:
case VIEW_CONVERT_EXPR:
case SAVE_EXPR:
case ADDR_EXPR:
case OMP_CLAUSE:
case OMP_RETURN:
case OMP_CONTINUE:
+ case OMP_SECTIONS_SWITCH:
break;
/* We don't account constants for now. Assume that the cost is amortized
case IDENTIFIER_NODE:
case INTEGER_CST:
case REAL_CST:
+ case FIXED_CST:
case COMPLEX_CST:
case VECTOR_CST:
case STRING_CST:
*walk_subtrees = 0;
return NULL;
+ /* CHANGE_DYNAMIC_TYPE_EXPR explicitly expands to nothing. */
+ case CHANGE_DYNAMIC_TYPE_EXPR:
+ *walk_subtrees = 0;
+ return NULL;
+
/* Try to estimate the cost of assignments. We have three cases to
deal with:
1) Simple assignments to registers;
case VEC_COND_EXPR:
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
+ case FIXED_CONVERT_EXPR:
case FIX_TRUNC_EXPR:
case NEGATE_EXPR:
case UNEQ_EXPR:
case LTGT_EXPR:
- case CONVERT_EXPR:
-
case CONJ_EXPR:
case PREDECREMENT_EXPR:
case VEC_WIDEN_MULT_LO_EXPR:
case VEC_UNPACK_HI_EXPR:
case VEC_UNPACK_LO_EXPR:
- case VEC_PACK_MOD_EXPR:
+ case VEC_UNPACK_FLOAT_HI_EXPR:
+ case VEC_UNPACK_FLOAT_LO_EXPR:
+ case VEC_PACK_TRUNC_EXPR:
case VEC_PACK_SAT_EXPR:
+ case VEC_PACK_FIX_TRUNC_EXPR:
case WIDEN_MULT_EXPR:
eni_time_weights.omp_cost = 40;
}
-typedef struct function *function_p;
-
-DEF_VEC_P(function_p);
-DEF_VEC_ALLOC_P(function_p,heap);
-
-/* Initialized with NOGC, making this poisonous to the garbage collector. */
-static VEC(function_p,heap) *cfun_stack;
-
-void
-push_cfun (struct function *new_cfun)
-{
- VEC_safe_push (function_p, heap, cfun_stack, cfun);
- cfun = new_cfun;
-}
-
-void
-pop_cfun (void)
-{
- cfun = VEC_pop (function_p, cfun_stack);
-}
-
/* Install new lexical TREE_BLOCK underneath 'current_block'. */
static void
add_lexical_block (tree current_block, tree new_block)
id->src_fn = fn;
id->src_node = cg_edge->callee;
id->src_cfun = DECL_STRUCT_FUNCTION (fn);
+ id->call_expr = t;
initialize_inlined_parameters (id, t, fn, bb);
push_gimplify_context ();
+ /* We make no attempt to keep the dominance info up to date. */
+ free_dominance_info (CDI_DOMINATORS);
+ free_dominance_info (CDI_POST_DOMINATORS);
+
/* Reach the trees by walking over the CFG, and note the
enclosing basic-blocks in the call edges. */
/* We walk the blocks going forward, because inlined function bodies
gimple_expand_calls_inline (bb, &id);
pop_gimplify_context (NULL);
- /* Renumber the (code) basic_blocks consecutively. */
- compact_blocks ();
- /* Renumber the lexical scoping (non-code) blocks consecutively. */
- number_blocks (fn);
#ifdef ENABLE_CHECKING
{
gcc_assert (e->inline_failed);
}
#endif
+
+ /* Fold the statements before compacting/renumbering the basic blocks. */
+ fold_marked_statements (last, id.statements_to_fold);
+ pointer_set_destroy (id.statements_to_fold);
+
+ /* Renumber the (code) basic_blocks consecutively. */
+ compact_blocks ();
+ /* Renumber the lexical scoping (non-code) blocks consecutively. */
+ number_blocks (fn);
/* We are not going to maintain the cgraph edges up to date.
Kill it so it won't confuse us. */
cgraph_node_remove_callees (id.dst_node);
- fold_marked_statements (last, id.statements_to_fold);
- pointer_set_destroy (id.statements_to_fold);
fold_cond_expr_cond ();
if (current_function_has_nonlocal_label)
make_nonlocal_label_edges ();
- /* We make no attempts to keep dominance info up-to-date. */
- free_dominance_info (CDI_DOMINATORS);
- free_dominance_info (CDI_POST_DOMINATORS);
/* It would be nice to check SSA/CFG/statement consistency here, but it is
not possible yet - the IPA passes might make various functions to not
throw and they don't care to proactively update local EH info. This is
TREE_READONLY (copy) = TREE_READONLY (decl);
TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (decl);
DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
+ DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
return copy_decl_for_dup_finish (id, decl, copy);
}
{
TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (decl);
DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (decl);
+ DECL_NO_TBAA_P (copy) = DECL_NO_TBAA_P (decl);
}
return copy_decl_for_dup_finish (id, decl, copy);