#include "system.h"
#include "coretypes.h"
#include "tm.h"
-#include "toplev.h" /* floor_log2 */
#include "diagnostic-core.h"
#include "tree.h"
#include "tree-inline.h"
{
gimple_stmt_iterator gsi = gsi_last_bb (id->entry_bb);
gimple init_stmt;
+ tree zero = build_zero_cst (TREE_TYPE (new_tree));
- init_stmt = gimple_build_assign (new_tree,
- fold_convert (TREE_TYPE (new_tree),
- integer_zero_node));
+ init_stmt = gimple_build_assign (new_tree, zero);
gsi_insert_after (&gsi, init_stmt, GSI_NEW_STMT);
SSA_NAME_IS_DEFAULT_DEF (new_tree) = 0;
}
*tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
ptr, TREE_OPERAND (*tp, 1));
TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
+ TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
}
TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
*walk_subtrees = 0;
*tp = build1 (INDIRECT_REF, type, new_tree);
TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
TREE_SIDE_EFFECTS (*tp) = TREE_SIDE_EFFECTS (old);
+ TREE_READONLY (*tp) = TREE_READONLY (old);
+ TREE_THIS_NOTRAP (*tp) = TREE_THIS_NOTRAP (old);
}
}
*walk_subtrees = 0;
tree new_rhs;
new_rhs = force_gimple_operand_gsi (&seq_gsi,
gimple_assign_rhs1 (stmt),
- true, NULL, false, GSI_NEW_STMT);
+ true, NULL, false,
+ GSI_CONTINUE_LINKING);
gimple_assign_set_rhs1 (stmt, new_rhs);
id->regimplify = false;
}
edge_iterator ei;
gimple phi;
gimple_stmt_iterator si;
+ edge new_edge;
+ bool inserted = false;
for (si = gsi_start (phi_nodes (bb)); !gsi_end_p (si); gsi_next (&si))
{
tree res, new_res;
gimple new_phi;
- edge new_edge;
phi = gsi_stmt (si);
res = PHI_RESULT (phi);
&& !is_gimple_val (new_arg))
{
gimple_seq stmts = NULL;
- basic_block tem;
new_arg = force_gimple_operand (new_arg, &stmts, true, NULL);
- tem = gsi_insert_seq_on_edge_immediate (new_edge, stmts);
- if (tem)
- new_edge = single_succ_edge (tem);
+ gsi_insert_seq_on_edge (new_edge, stmts);
+ inserted = true;
}
add_phi_arg (new_phi, new_arg, new_edge,
gimple_phi_arg_location_from_edge (phi, old_edge));
}
}
}
+
+ /* Commit the delayed edge insertions. */
+ if (inserted)
+ FOR_EACH_EDGE (new_edge, ei, new_bb->preds)
+ gsi_commit_one_edge_insert (new_edge, NULL);
}
gcc_assert (!VOID_TYPE_P (type));
+ if (TREE_CODE (type) == VECTOR_TYPE)
+ {
+ enum machine_mode inner = TYPE_MODE (TREE_TYPE (type));
+ enum machine_mode simd
+ = targetm.vectorize.preferred_simd_mode (inner);
+ int simd_mode_size = GET_MODE_SIZE (simd);
+ return ((GET_MODE_SIZE (TYPE_MODE (type)) + simd_mode_size - 1)
+ / simd_mode_size);
+ }
+
size = int_size_in_bytes (type);
if (size < 0 || size > MOVE_MAX_PIECES * MOVE_RATIO (!optimize_size))
CASE_CONVERT:
case COMPLEX_EXPR:
case PAREN_EXPR:
+ case VIEW_CONVERT_EXPR:
return 0;
/* Assign cost of 1 to usual operations.
case POINTER_PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
+ case FMA_EXPR:
case ADDR_SPACE_CONVERT_EXPR:
case FIXED_CONVERT_EXPR:
if (POINTER_TYPE_P (funtype))
funtype = TREE_TYPE (funtype);
- if (is_simple_builtin (decl))
+ /* Do not special case builtins where we see the body.
+ This just confuses the inliner. */
+ if (!decl || cgraph_node (decl)->analyzed)
+ ;
+ /* For builtins that are likely expanded to nothing or
+ inlined, do not account for operand costs. */
+ else if (is_simple_builtin (decl))
return 0;
else if (is_inexpensive_builtin (decl))
- cost = weights->target_builtin_call_cost;
- else
- cost = weights->call_cost;
+ return weights->target_builtin_call_cost;
+ else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
+ {
+ /* We canonicalize x * x to pow (x, 2.0) with -ffast-math, so
+ specialize the cheap expansion we do here.
+ ??? This asks for a more general solution. */
+ switch (DECL_FUNCTION_CODE (decl))
+ {
+ case BUILT_IN_POW:
+ case BUILT_IN_POWF:
+ case BUILT_IN_POWL:
+ if (TREE_CODE (gimple_call_arg (stmt, 1)) == REAL_CST
+ && REAL_VALUES_EQUAL
+ (TREE_REAL_CST (gimple_call_arg (stmt, 1)), dconst2))
+ return estimate_operator_cost (MULT_EXPR, weights,
+ gimple_call_arg (stmt, 0),
+ gimple_call_arg (stmt, 0));
+ break;
+ default:
+ break;
+ }
+ }
+
+ cost = weights->call_cost;
if (decl)
funtype = TREE_TYPE (decl);
break;
}
+ case GIMPLE_RETURN:
+ return weights->return_cost;
+
case GIMPLE_GOTO:
case GIMPLE_LABEL:
case GIMPLE_NOP:
case GIMPLE_PHI:
- case GIMPLE_RETURN:
case GIMPLE_PREDICT:
case GIMPLE_DEBUG:
return 0;
eni_size_weights.div_mod_cost = 1;
eni_size_weights.omp_cost = 40;
eni_size_weights.time_based = false;
+ eni_size_weights.return_cost = 1;
/* Estimating time for call is difficult, since we have no idea what the
called function does. In the current uses of eni_time_weights,
underestimating the cost does less harm than overestimating it, so
we choose a rather small value here. */
eni_time_weights.call_cost = 10;
- eni_time_weights.target_builtin_call_cost = 10;
+ eni_time_weights.target_builtin_call_cost = 1;
eni_time_weights.div_mod_cost = 10;
eni_time_weights.omp_cost = 40;
eni_time_weights.time_based = true;
+ eni_time_weights.return_cost = 2;
}
/* Estimate the number of instructions in a gimple_seq. */
}
if (purge_dead_abnormal_edges)
- gimple_purge_dead_abnormal_call_edges (return_block);
+ {
+ gimple_purge_dead_eh_edges (return_block);
+ gimple_purge_dead_abnormal_call_edges (return_block);
+ }
/* If the value of the new expression is ignored, that's OK. We
don't warn about this for CALL_EXPRs, so we shouldn't warn about
basic_block bb;
int last = n_basic_blocks;
struct gimplify_ctx gctx;
+ bool inlined_p = false;
/* There is no point in performing inlining if errors have already
occurred -- and we might crash if we try to inline invalid
follow it; we'll trudge through them, processing their CALL_EXPRs
along the way. */
FOR_EACH_BB (bb)
- gimple_expand_calls_inline (bb, &id);
+ inlined_p |= gimple_expand_calls_inline (bb, &id);
pop_gimplify_context (NULL);
}
#endif
- /* Fold the statements before compacting/renumbering the basic blocks. */
+ /* Fold queued statements. */
fold_marked_statements (last, id.statements_to_fold);
pointer_set_destroy (id.statements_to_fold);
gcc_assert (!id.debug_stmts);
- /* Renumber the (code) basic_blocks consecutively. */
- compact_blocks ();
+ /* If we didn't inline into the function, there is nothing to do. */
+ if (!inlined_p)
+ return 0;
+
/* Renumber the lexical scoping (non-code) blocks consecutively. */
number_blocks (fn);
- fold_cond_expr_cond ();
delete_unreachable_blocks_update_callgraph (&id);
#ifdef ENABLE_CHECKING
verify_cgraph_node (id.dst_node);
return (TODO_update_ssa
| TODO_cleanup_cfg
| (gimple_in_ssa_p (cfun) ? TODO_remove_unused_locals : 0)
+ | (gimple_in_ssa_p (cfun) ? TODO_update_address_taken : 0)
| (profile_status != PROFILE_ABSENT ? TODO_rebuild_frequencies : 0));
}
DECL_INITIAL (new_decl) = remap_blocks (DECL_INITIAL (id.src_fn), &id);
- /* Renumber the lexical scoping (non-code) blocks consecutively. */
- number_blocks (id.dst_fn);
-
declare_inline_vars (DECL_INITIAL (new_decl), vars);
if (!VEC_empty (tree, DECL_STRUCT_FUNCTION (old_decl)->local_decls))