#include "target.h"
/* expr.h is needed for MOVE_RATIO. */
#include "expr.h"
+#include "params.h"
/* The object of this pass is to replace a non-addressable aggregate with a
};
/* Random access to the child of a parent is performed by hashing.
- This prevents quadratic behaviour, and allows SRA to function
+ This prevents quadratic behavior, and allows SRA to function
reasonably on larger records. */
static htab_t sra_map;
if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
return false;
- /* The type must have a definite non-zero size. */
+ /* The type must have a definite nonzero size. */
if (TYPE_SIZE (type) == NULL || integer_zerop (TYPE_SIZE (type)))
goto fail;
return true;
default:
- abort ();
+ gcc_unreachable ();
}
}
break;
default:
- abort ();
+ gcc_unreachable ();
}
return h;
/* Take into account everything back up the chain. Given that chain
lengths are rarely very long, this should be acceptable. If we
- truely identify this as a performance problem, it should work to
+ truly identify this as a performance problem, it should work to
hash the pointer value "e->parent". */
for (p = e->parent; p ; p = p->parent)
h = (h * 65521) ^ sra_hash_tree (p->element);
return fields_compatible_p (ae, be);
default:
- abort ();
+ gcc_unreachable ();
}
}
default:
#ifdef ENABLE_CHECKING
/* Validate that we're not missing any references. */
- if (walk_tree (&inner, sra_find_candidate_decl, NULL, NULL))
- abort ();
+ gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
#endif
return;
}
static const struct sra_walk_fns fns = {
scan_use, scan_copy, scan_init, scan_ldst, true
};
+ bitmap_iterator bi;
sra_walk_function (&fns);
if (dump_file && (dump_flags & TDF_DETAILS))
{
- size_t i;
+ unsigned i;
fputs ("\nScan results:\n", dump_file);
- EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
{
tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
if (elt)
scan_dump (elt);
- });
+ }
fputc ('\n', dump_file);
}
}
DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
TREE_NO_WARNING (var) = TREE_NO_WARNING (base);
DECL_ARTIFICIAL (var) = DECL_ARTIFICIAL (base);
+ DECL_IGNORED_P (var) = DECL_IGNORED_P (base);
if (DECL_NAME (base) && !DECL_IGNORED_P (base))
{
break;
default:
- abort ();
+ gcc_unreachable ();
}
}
{
unsigned HOST_WIDE_INT full_size, inst_size = 0;
unsigned int inst_count;
+ unsigned int max_size;
+
+ /* If the sra-max-structure-size parameter is 0, then the
+ user has not overridden the parameter and we can choose a
+ sensible default. */
+ max_size = SRA_MAX_STRUCTURE_SIZE
+ ? SRA_MAX_STRUCTURE_SIZE
+ : MOVE_RATIO * UNITS_PER_WORD;
full_size = tree_low_cst (size_tree, 1);
/* If the structure is small, and we've made copies, go ahead
and instantiate, hoping that the copies will go away. */
- if (full_size <= (unsigned) MOVE_RATIO * UNITS_PER_WORD
+ if (full_size <= max_size
&& elt->n_copies > elt->n_uses)
use_block_copy = false;
else
{
inst_count = sum_instantiated_sizes (elt, &inst_size);
- if (inst_size * 4 >= full_size * 3)
+ if (inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
use_block_copy = false;
}
unsigned int i;
bool cleared_any;
struct bitmap_head_def done_head;
+ bitmap_iterator bi;
/* We cannot clear bits from a bitmap we're iterating over,
so save up all the bits to clear until the end. */
bitmap_initialize (&done_head, 1);
cleared_any = false;
- EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
{
tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
bitmap_set_bit (&done_head, i);
cleared_any = true;
}
- });
+ }
if (cleared_any)
{
- bitmap_operation (sra_candidates, sra_candidates, &done_head,
- BITMAP_AND_COMPL);
- bitmap_operation (needs_copy_in, needs_copy_in, &done_head,
- BITMAP_AND_COMPL);
+ bitmap_and_compl_into (sra_candidates, &done_head);
+ bitmap_and_compl_into (needs_copy_in, &done_head);
}
bitmap_clear (&done_head);
return build (IMAGPART_EXPR, elt->type, base);
default:
- abort ();
+ gcc_unreachable ();
}
}
for (dc = dst->children; dc ; dc = dc->sibling)
{
sc = lookup_element (src, dc->element, NULL, NO_INSERT);
- if (sc == NULL)
- abort ();
+ gcc_assert (sc);
generate_element_copy (dc, sc, list_p);
}
{
tree t;
- if (src->replacement == NULL)
- abort ();
+ gcc_assert (src->replacement);
t = build (MODIFY_EXPR, void_type_node, dst->replacement,
src->replacement);
{
tree t;
- if (elt->is_scalar)
- t = fold_convert (elt->type, integer_zero_node);
- else
- /* We generated a replacement for a non-scalar? */
- abort ();
+ gcc_assert (elt->is_scalar);
+ t = fold_convert (elt->type, integer_zero_node);
t = build (MODIFY_EXPR, void_type_node, elt->replacement, t);
append_to_statement_list (t, list_p);
}
}
-/* Find all variables within the gimplified statement that were not previously
- visible to the function and add them to the referenced variables list. */
-
-static tree
-find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
- void *data ATTRIBUTE_UNUSED)
-{
- tree t = *tp;
-
- if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
- add_referenced_tmp_var (t);
-
- if (DECL_P (t) || TYPE_P (t))
- *walk_subtrees = 0;
-
- return NULL;
-}
-
-static inline void
-find_new_referenced_vars (tree *stmt_p)
-{
- walk_tree (stmt_p, find_new_referenced_vars_1, NULL, NULL);
-}
-
/* Generate an assignment VAR = INIT, where INIT may need gimplification.
Add the result to *LIST_P. */
insert_edge_copies (tree stmt, basic_block bb)
{
edge e;
+ edge_iterator ei;
bool first_copy;
first_copy = true;
- for (e = bb->succ; e; e = e->succ_next)
+ FOR_EACH_EDGE (e, ei, bb->succs)
{
/* We don't need to insert copies on abnormal edges. The
value of the scalar replacement is not guaranteed to
/* If we have two scalar operands, modify the existing statement. */
stmt = bsi_stmt (*bsi);
-#ifdef ENABLE_CHECKING
/* See the commentary in sra_walk_function concerning
RETURN_EXPR, and why we should never see one here. */
- if (TREE_CODE (stmt) != MODIFY_EXPR)
- abort ();
-#endif
+ gcc_assert (TREE_CODE (stmt) == MODIFY_EXPR);
TREE_OPERAND (stmt, 0) = lhs_elt->replacement;
TREE_OPERAND (stmt, 1) = rhs_elt->replacement;
list = NULL;
generate_element_copy (lhs_elt, rhs_elt, &list);
- if (list == NULL)
- abort ();
+ gcc_assert (list);
sra_replace (bsi, list);
}
}
{
/* The LHS is fully instantiated. The list of initializations
replaces the original structure assignment. */
- if (!list)
- abort ();
+ gcc_assert (list);
mark_all_v_defs (bsi_stmt (*bsi));
sra_replace (bsi, list);
}
TREE_THIS_NOTRAP (t) = 1;
*walk_subtrees = 0;
}
- else if (DECL_P (t) || TYPE_P (t))
+ else if (IS_TYPE_OR_DECL_P (t))
*walk_subtrees = 0;
return NULL;
block_stmt_iterator *bsi, bool is_output)
{
/* Shouldn't have gotten called for a scalar. */
- if (elt->replacement)
- abort ();
+ gcc_assert (!elt->replacement);
if (elt->use_block_copy)
{
mark_all_v_defs (stmt);
generate_copy_inout (elt, is_output, other, &list);
- if (list == NULL)
- abort ();
+ gcc_assert (list);
/* Preserve EH semantics. */
if (stmt_ends_bb_p (stmt))
scalarize_parms (void)
{
tree list = NULL;
- size_t i;
+ unsigned i;
+ bitmap_iterator bi;
- EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i,
+ EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
{
tree var = referenced_var (i);
struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
generate_copy_inout (elt, true, var, &list);
- });
+ }
if (list)
insert_edge_copies (list, ENTRY_BLOCK_PTR);
0, /* properties_destroyed */
0, /* todo_flags_start */
TODO_dump_func | TODO_rename_vars
- | TODO_ggc_collect | TODO_verify_ssa /* todo_flags_finish */
+ | TODO_ggc_collect | TODO_verify_ssa, /* todo_flags_finish */
+ 0 /* letter */
};