/* Data flow functions for trees.
- Copyright (C) 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
+ Copyright (C) 2001, 2002, 2003, 2004, 2005, 2007, 2008 Free Software
+ Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2, or (at your option)
+the Free Software Foundation; either version 3, or (at your option)
any later version.
GCC is distributed in the hope that it will be useful,
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
-along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+along with GCC; see the file COPYING3. If not see
+<http://www.gnu.org/licenses/>. */
#include "config.h"
#include "system.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
-#include "errors.h"
#include "timevar.h"
#include "expr.h"
#include "ggc.h"
#include "function.h"
#include "diagnostic.h"
#include "tree-dump.h"
-#include "tree-gimple.h"
+#include "gimple.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
/* Counters used to display DFA and SSA statistics. */
struct dfa_stats_d
{
- long num_stmt_anns;
long num_var_anns;
long num_defs;
long num_uses;
long num_phis;
long num_phi_args;
- int max_num_phi_args;
- long num_v_may_defs;
+ size_t max_num_phi_args;
+ long num_vdefs;
long num_vuses;
- long num_v_must_defs;
-};
-
-
-/* State information for find_vars_r. */
-struct walk_state
-{
- /* Hash table used to avoid adding the same variable more than once. */
- htab_t vars_found;
};
/* Local functions. */
static void collect_dfa_stats (struct dfa_stats_d *);
-static tree collect_dfa_stats_r (tree *, int *, void *);
-static void add_immediate_use (tree, tree);
static tree find_vars_r (tree *, int *, void *);
-static void add_referenced_var (tree, struct walk_state *);
-static void compute_immediate_uses_for_phi (tree, bool (*)(tree));
-static void compute_immediate_uses_for_stmt (tree, int, bool (*)(tree));
-
-
-/* Global declarations. */
-
-/* Array of all variables referenced in the function. */
-varray_type referenced_vars;
/*---------------------------------------------------------------------------
various attributes for each variable used by alias analysis and the
optimizer. */
-static void
+static unsigned int
find_referenced_vars (void)
{
- htab_t vars_found;
basic_block bb;
- block_stmt_iterator si;
- struct walk_state walk_state;
-
- vars_found = htab_create (50, htab_hash_pointer, htab_eq_pointer, NULL);
- memset (&walk_state, 0, sizeof (walk_state));
- walk_state.vars_found = vars_found;
+ gimple_stmt_iterator si;
FOR_EACH_BB (bb)
- for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
- {
- tree *stmt_p = bsi_stmt_ptr (si);
- walk_tree (stmt_p, find_vars_r, &walk_state, NULL);
- }
+ {
+ for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
+ {
+ size_t i;
+ gimple stmt = gsi_stmt (si);
+ for (i = 0; i < gimple_num_ops (stmt); i++)
+ walk_tree (gimple_op_ptr (stmt, i), find_vars_r, NULL, NULL);
+ }
+
+ for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
+ {
+ gimple phi = gsi_stmt (si);
+ size_t i, len = gimple_phi_num_args (phi);
+
+ walk_tree (gimple_phi_result_ptr (phi), find_vars_r, NULL, NULL);
- htab_delete (vars_found);
+ for (i = 0; i < len; i++)
+ {
+ tree arg = gimple_phi_arg_def (phi, i);
+ walk_tree (&arg, find_vars_r, NULL, NULL);
+ }
+ }
+ }
+
+ return 0;
}
-struct tree_opt_pass pass_referenced_vars =
+struct gimple_opt_pass pass_referenced_vars =
{
+ {
+ GIMPLE_PASS,
NULL, /* name */
NULL, /* gate */
find_referenced_vars, /* execute */
PROP_gimple_leh | PROP_cfg, /* properties_required */
PROP_referenced_vars, /* properties_provided */
0, /* properties_destroyed */
- 0, /* todo_flags_start */
- 0, /* todo_flags_finish */
- 0 /* letter */
+ TODO_dump_func, /* todo_flags_start */
+ TODO_dump_func /* todo_flags_finish */
+ }
};
-/* Compute immediate uses.
-
- CALC_FOR is an optional function pointer which indicates whether
- immediate uses information should be calculated for a given SSA
- variable. If NULL, then information is computed for all
- variables.
-
- FLAGS is one of {TDFA_USE_OPS, TDFA_USE_VOPS}. It is used by
- compute_immediate_uses_for_stmt to determine whether to look at
- virtual and/or real operands while computing def-use chains. */
-
-void
-compute_immediate_uses (int flags, bool (*calc_for)(tree))
-{
- basic_block bb;
- block_stmt_iterator si;
-
- FOR_EACH_BB (bb)
- {
- tree phi;
-
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- {
- if (is_gimple_reg (PHI_RESULT (phi)))
- {
- if (!(flags & TDFA_USE_OPS))
- continue;
- }
- else
- {
- if (!(flags & TDFA_USE_VOPS))
- continue;
- }
-
- compute_immediate_uses_for_phi (phi, calc_for);
- }
-
- for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
- {
- tree stmt = bsi_stmt (si);
- get_stmt_operands (stmt);
- compute_immediate_uses_for_stmt (stmt, flags, calc_for);
- }
- }
-}
-
-
-/* Invalidates dataflow information for a statement STMT. */
-
-void
-free_df_for_stmt (tree stmt)
-{
- dataflow_t *df;
-
- if (TREE_CODE (stmt) == PHI_NODE)
- df = &PHI_DF (stmt);
- else
- {
- stmt_ann_t ann = stmt_ann (stmt);
-
- if (!ann)
- return;
-
- df = &ann->df;
- }
-
- if (!*df)
- return;
-
- /* If we have a varray of immediate uses, then go ahead and release
- it for re-use. */
- if ((*df)->immediate_uses)
- ggc_free ((*df)->immediate_uses);
-
- /* Similarly for the main dataflow structure. */
- ggc_free (*df);
- *df = NULL;
-}
-
-
-/* Invalidate dataflow information for the whole function.
-
- Note this only invalidates dataflow information on statements and
- PHI nodes which are reachable.
-
- A deleted statement may still have attached dataflow information
- on it. */
-
-void
-free_df (void)
-{
- basic_block bb;
- block_stmt_iterator si;
-
- FOR_EACH_BB (bb)
- {
- tree phi;
-
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- free_df_for_stmt (phi);
-
- for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
- {
- tree stmt = bsi_stmt (si);
- free_df_for_stmt (stmt);
- }
- }
-}
-
-
-/* Helper for compute_immediate_uses. Check all the USE and/or VUSE
- operands in phi node PHI and add a def-use edge between their
- defining statement and PHI. CALC_FOR is as in
- compute_immediate_uses.
-
- PHI nodes are easy, we only need to look at their arguments. */
-
-static void
-compute_immediate_uses_for_phi (tree phi, bool (*calc_for)(tree))
-{
- int i;
-
- gcc_assert (TREE_CODE (phi) == PHI_NODE);
-
- for (i = 0; i < PHI_NUM_ARGS (phi); i++)
- {
- tree arg = PHI_ARG_DEF (phi, i);
-
- if (TREE_CODE (arg) == SSA_NAME && (!calc_for || calc_for (arg)))
- {
- tree imm_rdef_stmt = SSA_NAME_DEF_STMT (PHI_ARG_DEF (phi, i));
- if (!IS_EMPTY_STMT (imm_rdef_stmt))
- add_immediate_use (imm_rdef_stmt, phi);
- }
- }
-}
-
-
-/* Another helper for compute_immediate_uses. Depending on the value
- of FLAGS, check all the USE and/or VUSE operands in STMT and add a
- def-use edge between their defining statement and STMT. CALC_FOR
- is as in compute_immediate_uses. */
-
-static void
-compute_immediate_uses_for_stmt (tree stmt, int flags, bool (*calc_for)(tree))
-{
- tree use;
- ssa_op_iter iter;
-
- /* PHI nodes are handled elsewhere. */
- gcc_assert (TREE_CODE (stmt) != PHI_NODE);
-
- /* Look at USE_OPS or VUSE_OPS according to FLAGS. */
- if (flags & TDFA_USE_OPS)
- {
- FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_USE)
- {
- tree imm_stmt = SSA_NAME_DEF_STMT (use);
- if (!IS_EMPTY_STMT (imm_stmt) && (!calc_for || calc_for (use)))
- add_immediate_use (imm_stmt, stmt);
- }
- }
-
- if (flags & TDFA_USE_VOPS)
- {
- FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_VIRTUAL_USES)
- {
- tree imm_rdef_stmt = SSA_NAME_DEF_STMT (use);
- if (!IS_EMPTY_STMT (imm_rdef_stmt) && (!calc_for || calc_for (use)))
- add_immediate_use (imm_rdef_stmt, stmt);
- }
-
- FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_ALL_KILLS)
- {
- tree imm_rdef_stmt = SSA_NAME_DEF_STMT (use);
- if (!IS_EMPTY_STMT (imm_rdef_stmt) && (!calc_for || calc_for (use)))
- add_immediate_use (imm_rdef_stmt, stmt);
- }
- }
-}
-
-
-/* Add statement USE_STMT to the list of statements that use definitions
- made by STMT. */
-
-static void
-add_immediate_use (tree stmt, tree use_stmt)
-{
- struct dataflow_d **df;
-
- if (TREE_CODE (stmt) == PHI_NODE)
- df = &PHI_DF (stmt);
- else
- {
- stmt_ann_t ann = get_stmt_ann (stmt);
- df = &ann->df;
- }
-
- if (*df == NULL)
- {
- *df = ggc_alloc (sizeof (struct dataflow_d));
- memset ((void *) *df, 0, sizeof (struct dataflow_d));
- (*df)->uses[0] = use_stmt;
- return;
- }
-
- if (!(*df)->uses[1])
- {
- (*df)->uses[1] = use_stmt;
- return;
- }
-
- if ((*df)->immediate_uses == NULL)
- VARRAY_TREE_INIT ((*df)->immediate_uses, 4, "immediate_uses");
-
- VARRAY_PUSH_TREE ((*df)->immediate_uses, use_stmt);
-}
-
-
-/* If the immediate use of USE points to OLD, then redirect it to NEW. */
-
-static void
-redirect_immediate_use (tree use, tree old, tree new)
-{
- tree imm_stmt = SSA_NAME_DEF_STMT (use);
- struct dataflow_d *df = get_immediate_uses (imm_stmt);
- unsigned int num_uses = num_immediate_uses (df);
- unsigned int i;
-
- for (i = 0; i < num_uses; i++)
- {
- if (immediate_use (df, i) == old)
- {
- if (i == 0 || i == 1)
- df->uses[i] = new;
- else
- VARRAY_TREE (df->immediate_uses, i - 2) = new;
- }
- }
-}
-
-
-/* Redirect all immediate uses for operands in OLD so that they point
- to NEW. This routine should have no knowledge of how immediate
- uses are stored. */
-
-void
-redirect_immediate_uses (tree old, tree new)
-{
- ssa_op_iter iter;
- tree val;
-
- FOR_EACH_SSA_TREE_OPERAND (val, old, iter, SSA_OP_ALL_USES)
- redirect_immediate_use (val, old, new);
-}
-
-
/*---------------------------------------------------------------------------
Manage annotations
---------------------------------------------------------------------------*/
gcc_assert (t);
gcc_assert (DECL_P (t));
- gcc_assert (!t->common.ann || t->common.ann->common.type == VAR_ANN);
-
- ann = ggc_alloc (sizeof (*ann));
- memset ((void *) ann, 0, sizeof (*ann));
+ gcc_assert (!t->base.ann || t->base.ann->common.type == VAR_ANN);
+ ann = GGC_CNEW (struct var_ann_d);
ann->common.type = VAR_ANN;
-
- t->common.ann = (tree_ann_t) ann;
+ t->base.ann = (tree_ann_t) ann;
return ann;
}
+/* Create a new annotation for a FUNCTION_DECL node T. */
-/* Create a new annotation for a statement node T. */
-
-stmt_ann_t
-create_stmt_ann (tree t)
+function_ann_t
+create_function_ann (tree t)
{
- stmt_ann_t ann;
+ function_ann_t ann;
- gcc_assert (is_gimple_stmt (t));
- gcc_assert (!t->common.ann || t->common.ann->common.type == STMT_ANN);
+ gcc_assert (t);
+ gcc_assert (TREE_CODE (t) == FUNCTION_DECL);
+ gcc_assert (!t->base.ann || t->base.ann->common.type == FUNCTION_ANN);
- ann = ggc_alloc (sizeof (*ann));
+ ann = (function_ann_t) ggc_alloc (sizeof (*ann));
memset ((void *) ann, 0, sizeof (*ann));
- ann->common.type = STMT_ANN;
-
- /* Since we just created the annotation, mark the statement modified. */
- ann->modified = true;
+ ann->common.type = FUNCTION_ANN;
- t->common.ann = (tree_ann_t) ann;
+ t->base.ann = (tree_ann_t) ann;
return ann;
}
+/* Renumber all of the gimple stmt uids. */
+
+void
+renumber_gimple_stmt_uids (void)
+{
+ basic_block bb;
+
+ set_gimple_stmt_max_uid (cfun, 0);
+ FOR_ALL_BB (bb)
+ {
+ gimple_stmt_iterator bsi;
+ for (bsi = gsi_start_bb (bb); !gsi_end_p (bsi); gsi_next (&bsi))
+ {
+ gimple stmt = gsi_stmt (bsi);
+ gimple_set_uid (stmt, inc_gimple_stmt_max_uid (cfun));
+ }
+ }
+}
/* Create a new annotation for a tree T. */
-tree_ann_t
-create_tree_ann (tree t)
+tree_ann_common_t
+create_tree_common_ann (tree t)
{
- tree_ann_t ann;
+ tree_ann_common_t ann;
gcc_assert (t);
- gcc_assert (!t->common.ann || t->common.ann->common.type == TREE_ANN_COMMON);
+ gcc_assert (!t->base.ann || t->base.ann->common.type == TREE_ANN_COMMON);
- ann = ggc_alloc (sizeof (*ann));
- memset ((void *) ann, 0, sizeof (*ann));
+ ann = GGC_CNEW (struct tree_ann_common_d);
- ann->common.type = TREE_ANN_COMMON;
- t->common.ann = ann;
+ ann->type = TREE_ANN_COMMON;
+ ann->rn = -1;
+ t->base.ann = (tree_ann_t) ann;
return ann;
}
make_rename_temp (tree type, const char *prefix)
{
tree t = create_tmp_var (type, prefix);
- if (referenced_vars)
+
+ if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
+ || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
+ DECL_GIMPLE_REG_P (t) = 1;
+
+ if (gimple_referenced_vars (cfun))
{
- add_referenced_tmp_var (t);
- bitmap_set_bit (vars_to_rename, var_ann (t)->uid);
+ add_referenced_var (t);
+ mark_sym_for_renaming (t);
}
+
return t;
}
void
dump_referenced_vars (FILE *file)
{
- size_t i;
-
+ tree var;
+ referenced_var_iterator rvi;
+
fprintf (file, "\nReferenced variables in %s: %u\n\n",
get_name (current_function_decl), (unsigned) num_referenced_vars);
-
- for (i = 0; i < num_referenced_vars; i++)
+
+ FOR_EACH_REFERENCED_VAR (var, rvi)
{
- tree var = referenced_var (i);
fprintf (file, "Variable: ");
dump_variable (file, var);
fprintf (file, "\n");
ann = var_ann (var);
- fprintf (file, ", UID %u", (unsigned) ann->uid);
+ fprintf (file, ", UID D.%u", (unsigned) DECL_UID (var));
fprintf (file, ", ");
print_generic_expr (file, TREE_TYPE (var), dump_flags);
- if (ann->type_mem_tag)
+ if (ann && ann->symbol_mem_tag)
{
- fprintf (file, ", type memory tag: ");
- print_generic_expr (file, ann->type_mem_tag, dump_flags);
+ fprintf (file, ", symbol memory tag: ");
+ print_generic_expr (file, ann->symbol_mem_tag, dump_flags);
}
- if (ann->is_alias_tag)
- fprintf (file, ", is an alias tag");
-
if (TREE_ADDRESSABLE (var))
fprintf (file, ", is addressable");
if (TREE_THIS_VOLATILE (var))
fprintf (file, ", is volatile");
+ dump_mem_sym_stats_for_var (file, var);
+
if (is_call_clobbered (var))
- fprintf (file, ", call clobbered");
+ {
+ const char *s = "";
+ var_ann_t va = var_ann (var);
+ unsigned int escape_mask = va->escape_mask;
+
+ fprintf (file, ", call clobbered");
+ fprintf (file, " (");
+ if (escape_mask & ESCAPE_STORED_IN_GLOBAL)
+ { fprintf (file, "%sstored in global", s); s = ", "; }
+ if (escape_mask & ESCAPE_TO_ASM)
+ { fprintf (file, "%sgoes through ASM", s); s = ", "; }
+ if (escape_mask & ESCAPE_TO_CALL)
+ { fprintf (file, "%spassed to call", s); s = ", "; }
+ if (escape_mask & ESCAPE_BAD_CAST)
+ { fprintf (file, "%sbad cast", s); s = ", "; }
+ if (escape_mask & ESCAPE_TO_RETURN)
+ { fprintf (file, "%sreturned from func", s); s = ", "; }
+ if (escape_mask & ESCAPE_TO_PURE_CONST)
+ { fprintf (file, "%spassed to pure/const", s); s = ", "; }
+ if (escape_mask & ESCAPE_IS_GLOBAL)
+ { fprintf (file, "%sis global var", s); s = ", "; }
+ if (escape_mask & ESCAPE_IS_PARM)
+ { fprintf (file, "%sis incoming pointer", s); s = ", "; }
+ if (escape_mask & ESCAPE_UNKNOWN)
+ { fprintf (file, "%sunknown escape", s); s = ", "; }
+ fprintf (file, ")");
+ }
+
+ if (ann->noalias_state == NO_ALIAS)
+ fprintf (file, ", NO_ALIAS (does not alias other NO_ALIAS symbols)");
+ else if (ann->noalias_state == NO_ALIAS_GLOBAL)
+ fprintf (file, ", NO_ALIAS_GLOBAL (does not alias other NO_ALIAS symbols"
+ " and global vars)");
+ else if (ann->noalias_state == NO_ALIAS_ANYTHING)
+ fprintf (file, ", NO_ALIAS_ANYTHING (does not alias any other symbols)");
- if (ann->default_def)
+ if (gimple_default_def (cfun, var))
{
fprintf (file, ", default def: ");
- print_generic_expr (file, ann->default_def, dump_flags);
+ print_generic_expr (file, gimple_default_def (cfun, var), dump_flags);
}
- if (ann->may_aliases)
+ if (MTAG_P (var) && may_aliases (var))
{
fprintf (file, ", may aliases: ");
dump_may_aliases_for (file, var);
}
- fprintf (file, "\n");
-}
-
-
-/* Dump variable VAR and its may-aliases to stderr. */
-
-void
-debug_variable (tree var)
-{
- dump_variable (stderr, var);
-}
-
-
-/* Dump def-use edges on FILE. */
-
-void
-dump_immediate_uses (FILE *file)
-{
- basic_block bb;
- block_stmt_iterator si;
- const char *funcname
- = lang_hooks.decl_printable_name (current_function_decl, 2);
-
- fprintf (file, "\nDef-use edges for function %s\n", funcname);
-
- FOR_EACH_BB (bb)
+ if (!is_gimple_reg (var))
{
- tree phi;
-
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
- dump_immediate_uses_for (file, phi);
-
- for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
- dump_immediate_uses_for (file, bsi_stmt (si));
- }
-
- fprintf (file, "\n");
-}
-
-
-/* Dump def-use edges on stderr. */
-
-void
-debug_immediate_uses (void)
-{
- dump_immediate_uses (stderr);
-}
-
-
-/* Dump all immediate uses for STMT on FILE. */
-
-void
-dump_immediate_uses_for (FILE *file, tree stmt)
-{
- dataflow_t df = get_immediate_uses (stmt);
- int num_imm_uses = num_immediate_uses (df);
-
- if (num_imm_uses > 0)
- {
- int i;
-
- fprintf (file, "-> ");
- print_generic_stmt (file, stmt, TDF_SLIM);
- fprintf (file, "\n");
-
- for (i = 0; i < num_imm_uses; i++)
+ if (memory_partition (var))
{
- fprintf (file, "\t");
- print_generic_stmt (file, immediate_use (df, i), TDF_SLIM);
- fprintf (file, "\n");
+ fprintf (file, ", belongs to partition: ");
+ print_generic_expr (file, memory_partition (var), dump_flags);
}
- fprintf (file, "\n");
+ if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
+ {
+ fprintf (file, ", partition symbols: ");
+ dump_decl_set (file, MPT_SYMBOLS (var));
+ }
}
+
+ fprintf (file, "\n");
}
-/* Dump immediate uses for STMT on stderr. */
+/* Dump variable VAR and its may-aliases to stderr. */
void
-debug_immediate_uses_for (tree stmt)
+debug_variable (tree var)
{
- dump_immediate_uses_for (stderr, stmt);
+ dump_variable (stderr, var);
}
fprintf (file, fmt_str_1, "Referenced variables", (unsigned long)num_referenced_vars,
SCALE (size), LABEL (size));
- size = dfa_stats.num_stmt_anns * sizeof (struct stmt_ann_d);
- total += size;
- fprintf (file, fmt_str_1, "Statements annotated", dfa_stats.num_stmt_anns,
- SCALE (size), LABEL (size));
-
size = dfa_stats.num_var_anns * sizeof (struct var_ann_d);
total += size;
fprintf (file, fmt_str_1, "Variables annotated", dfa_stats.num_var_anns,
fprintf (file, fmt_str_1, "VUSE operands", dfa_stats.num_vuses,
SCALE (size), LABEL (size));
- size = dfa_stats.num_v_may_defs * sizeof (tree *);
- total += size;
- fprintf (file, fmt_str_1, "V_MAY_DEF operands", dfa_stats.num_v_may_defs,
- SCALE (size), LABEL (size));
-
- size = dfa_stats.num_v_must_defs * sizeof (tree *);
+ size = dfa_stats.num_vdefs * sizeof (tree *);
total += size;
- fprintf (file, fmt_str_1, "V_MUST_DEF operands", dfa_stats.num_v_must_defs,
+ fprintf (file, fmt_str_1, "VDEF operands", dfa_stats.num_vdefs,
SCALE (size), LABEL (size));
- size = dfa_stats.num_phis * sizeof (struct tree_phi_node);
+ size = dfa_stats.num_phis * sizeof (struct gimple_statement_phi);
total += size;
fprintf (file, fmt_str_1, "PHI nodes", dfa_stats.num_phis,
SCALE (size), LABEL (size));
fprintf (file, "\n");
if (dfa_stats.num_phis)
- fprintf (file, "Average number of arguments per PHI node: %.1f (max: %d)\n",
+ fprintf (file, "Average number of arguments per PHI node: %.1f (max: %ld)\n",
(float) dfa_stats.num_phi_args / (float) dfa_stats.num_phis,
- dfa_stats.max_num_phi_args);
+ (long) dfa_stats.max_num_phi_args);
fprintf (file, "\n");
}
}
-/* Collect DFA statistics and store them in the structure pointed by
+/* Collect DFA statistics and store them in the structure pointed to by
DFA_STATS_P. */
static void
-collect_dfa_stats (struct dfa_stats_d *dfa_stats_p)
+collect_dfa_stats (struct dfa_stats_d *dfa_stats_p ATTRIBUTE_UNUSED)
{
- struct pointer_set_t *pset;
basic_block bb;
- block_stmt_iterator i;
+ referenced_var_iterator vi;
+ tree var;
gcc_assert (dfa_stats_p);
memset ((void *)dfa_stats_p, 0, sizeof (struct dfa_stats_d));
- /* Walk all the trees in the function counting references. Start at
- basic block 0, but don't stop at block boundaries. */
- pset = pointer_set_create ();
-
- for (i = bsi_start (BASIC_BLOCK (0)); !bsi_end_p (i); bsi_next (&i))
- walk_tree (bsi_stmt_ptr (i), collect_dfa_stats_r, (void *) dfa_stats_p,
- pset);
-
- pointer_set_destroy (pset);
+ /* Count all the variable annotations. */
+ FOR_EACH_REFERENCED_VAR (var, vi)
+ if (var_ann (var))
+ dfa_stats_p->num_var_anns++;
+ /* Walk all the statements in the function counting references. */
FOR_EACH_BB (bb)
{
- tree phi;
- for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
+ gimple_stmt_iterator si;
+
+ for (si = gsi_start_phis (bb); !gsi_end_p (si); gsi_next (&si))
{
+ gimple phi = gsi_stmt (si);
dfa_stats_p->num_phis++;
- dfa_stats_p->num_phi_args += PHI_NUM_ARGS (phi);
- if (PHI_NUM_ARGS (phi) > dfa_stats_p->max_num_phi_args)
- dfa_stats_p->max_num_phi_args = PHI_NUM_ARGS (phi);
+ dfa_stats_p->num_phi_args += gimple_phi_num_args (phi);
+ if (gimple_phi_num_args (phi) > dfa_stats_p->max_num_phi_args)
+ dfa_stats_p->max_num_phi_args = gimple_phi_num_args (phi);
}
- }
-}
-
-/* Callback for walk_tree to collect DFA statistics for a tree and its
- children. */
-
-static tree
-collect_dfa_stats_r (tree *tp, int *walk_subtrees ATTRIBUTE_UNUSED,
- void *data)
-{
- tree t = *tp;
- struct dfa_stats_d *dfa_stats_p = (struct dfa_stats_d *)data;
-
- if (t->common.ann)
- {
- switch (ann_type (t->common.ann))
+ for (si = gsi_start_bb (bb); !gsi_end_p (si); gsi_next (&si))
{
- case STMT_ANN:
- {
- stmt_ann_t ann = (stmt_ann_t) t->common.ann;
- dfa_stats_p->num_stmt_anns++;
- dfa_stats_p->num_defs += NUM_DEFS (DEF_OPS (ann));
- dfa_stats_p->num_uses += NUM_USES (USE_OPS (ann));
- dfa_stats_p->num_v_may_defs +=
- NUM_V_MAY_DEFS (V_MAY_DEF_OPS (ann));
- dfa_stats_p->num_vuses += NUM_VUSES (VUSE_OPS (ann));
- dfa_stats_p->num_v_must_defs +=
- NUM_V_MUST_DEFS (V_MUST_DEF_OPS (ann));
- break;
- }
-
- case VAR_ANN:
- dfa_stats_p->num_var_anns++;
- break;
-
- default:
- break;
+ gimple stmt = gsi_stmt (si);
+ dfa_stats_p->num_defs += NUM_SSA_OPERANDS (stmt, SSA_OP_DEF);
+ dfa_stats_p->num_uses += NUM_SSA_OPERANDS (stmt, SSA_OP_USE);
+ dfa_stats_p->num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
+ dfa_stats_p->num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
}
}
-
- return NULL;
}
the function. */
static tree
-find_vars_r (tree *tp, int *walk_subtrees, void *data)
+find_vars_r (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
{
- struct walk_state *walk_state = (struct walk_state *) data;
+ /* If we are reading the lto info back in, we need to rescan the
+ referenced vars. */
+ if (TREE_CODE (*tp) == SSA_NAME)
+ add_referenced_var (SSA_NAME_VAR (*tp));
/* If T is a regular variable that the optimizers are interested
in, add it to the list of variables. */
- if (SSA_VAR_P (*tp))
- add_referenced_var (*tp, walk_state);
+ else if (SSA_VAR_P (*tp))
+ add_referenced_var (*tp);
/* Type, _DECL and constant nodes have no interesting children.
Ignore them. */
return NULL_TREE;
}
+/* Lookup UID in the referenced_vars hashtable and return the associated
+ variable. */
-/* Add VAR to the list of dereferenced variables.
+tree
+referenced_var_lookup (unsigned int uid)
+{
+ tree h;
+ struct tree_decl_minimal in;
+ in.uid = uid;
+ h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
+ gcc_assert (h || uid == 0);
+ return h;
+}
- WALK_STATE contains a hash table used to avoid adding the same
- variable more than once. Note that this function assumes that
- VAR is a valid SSA variable. If WALK_STATE is NULL, no
- duplicate checking is done. */
+/* Check if TO is in the referenced_vars hash table and insert it if not.
+ Return true if it required insertion. */
-static void
-add_referenced_var (tree var, struct walk_state *walk_state)
+bool
+referenced_var_check_and_insert (tree to)
+{
+ tree h, *loc;
+ struct tree_decl_minimal in;
+ unsigned int uid = DECL_UID (to);
+
+ in.uid = uid;
+ h = (tree) htab_find_with_hash (gimple_referenced_vars (cfun), &in, uid);
+ if (h)
+ {
+ /* DECL_UID has already been entered in the table. Verify that it is
+ the same entry as TO. See PR 27793. */
+ gcc_assert (h == to);
+ return false;
+ }
+
+ loc = (tree *) htab_find_slot_with_hash (gimple_referenced_vars (cfun),
+ &in, uid, INSERT);
+ *loc = to;
+ return true;
+}
+
+/* Lookup VAR UID in the default_defs hashtable and return the associated
+ variable. */
+
+tree
+gimple_default_def (struct function *fn, tree var)
{
- void **slot;
- var_ann_t v_ann;
+ struct tree_decl_minimal ind;
+ struct tree_ssa_name in;
+ gcc_assert (SSA_VAR_P (var));
+ in.var = (tree)&ind;
+ ind.uid = DECL_UID (var);
+ return (tree) htab_find_with_hash (DEFAULT_DEFS (fn), &in, DECL_UID (var));
+}
- v_ann = get_var_ann (var);
+/* Insert the pair VAR's UID, DEF into the default_defs hashtable. */
- if (walk_state)
- slot = htab_find_slot (walk_state->vars_found, (void *) var, INSERT);
- else
- slot = NULL;
+void
+set_default_def (tree var, tree def)
+{
+ struct tree_decl_minimal ind;
+ struct tree_ssa_name in;
+ void **loc;
- if (slot == NULL || *slot == NULL)
+ gcc_assert (SSA_VAR_P (var));
+ in.var = (tree)&ind;
+ ind.uid = DECL_UID (var);
+ if (!def)
{
- /* This is the first time we find this variable, add it to the
- REFERENCED_VARS array and annotate it with attributes that are
- intrinsic to the variable. */
- if (slot)
- *slot = (void *) var;
- v_ann->uid = num_referenced_vars;
- VARRAY_PUSH_TREE (referenced_vars, var);
-
- /* Global variables are always call-clobbered. */
- if (is_global_var (var))
- mark_call_clobbered (var);
+ loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
+ DECL_UID (var), INSERT);
+ gcc_assert (*loc);
+ htab_remove_elt (DEFAULT_DEFS (cfun), *loc);
+ return;
+ }
+ gcc_assert (TREE_CODE (def) == SSA_NAME && SSA_NAME_VAR (def) == var);
+ loc = htab_find_slot_with_hash (DEFAULT_DEFS (cfun), &in,
+ DECL_UID (var), INSERT);
+
+ /* Default definition might be changed by tail call optimization. */
+ if (*loc)
+ SSA_NAME_IS_DEFAULT_DEF (*(tree *) loc) = false;
+ *(tree *) loc = def;
+
+ /* Mark DEF as the default definition for VAR. */
+ SSA_NAME_IS_DEFAULT_DEF (def) = true;
+}
+
+/* Add VAR to the list of referenced variables if it isn't already there. */
+
+void
+add_referenced_var (tree var)
+{
+ var_ann_t v_ann;
+
+ v_ann = get_var_ann (var);
+ gcc_assert (DECL_P (var));
+
+ /* Insert VAR into the referenced_vars hash table if it isn't present. */
+ if (referenced_var_check_and_insert (var))
+ {
+ /* This is the first time we found this variable, annotate it with
+ attributes that are intrinsic to the variable. */
+
+ /* Tag's don't have DECL_INITIAL. */
+ if (MTAG_P (var))
+ return;
/* Scan DECL_INITIAL for pointer variables as they may contain
address arithmetic referencing the address of other
- variables. */
+ variables.
+ Even non-constant initializers need to be walked, because
+ IPA passes might prove that they are invariant later on. */
if (DECL_INITIAL (var)
- && POINTER_TYPE_P (TREE_TYPE (var)))
- walk_tree (&DECL_INITIAL (var), find_vars_r, walk_state, 0);
+ /* Initializers of external variables are not useful to the
+ optimizers. */
+ && !DECL_EXTERNAL (var))
+ walk_tree (&DECL_INITIAL (var), find_vars_r, NULL, 0);
}
}
+/* Remove VAR from the list. */
+
+void
+remove_referenced_var (tree var)
+{
+ var_ann_t v_ann;
+ struct tree_decl_minimal in;
+ void **loc;
+ unsigned int uid = DECL_UID (var);
+
+ clear_call_clobbered (var);
+ bitmap_clear_bit (gimple_call_used_vars (cfun), uid);
+ if ((v_ann = var_ann (var)))
+ {
+ /* Preserve var_anns of globals, but clear their alias info. */
+ if (MTAG_P (var)
+ || (!TREE_STATIC (var) && !DECL_EXTERNAL (var)))
+ {
+ ggc_free (v_ann);
+ var->base.ann = NULL;
+ }
+ else
+ {
+ v_ann->mpt = NULL_TREE;
+ v_ann->symbol_mem_tag = NULL_TREE;
+ }
+ }
+ gcc_assert (DECL_P (var));
+ in.uid = uid;
+ loc = htab_find_slot_with_hash (gimple_referenced_vars (cfun), &in, uid,
+ NO_INSERT);
+ htab_clear_slot (gimple_referenced_vars (cfun), loc);
+}
+
/* Return the virtual variable associated to the non-scalar variable VAR. */
return var;
}
-/* Add a temporary variable to REFERENCED_VARS. This is similar to
- add_referenced_var, but is used by passes that need to add new temps to
- the REFERENCED_VARS array after the program has been scanned for
- variables. The variable will just receive a new UID and be added
- to the REFERENCED_VARS array without checking for duplicates. */
+/* Mark all the naked symbols in STMT for SSA renaming.
+
+ NOTE: This function should only be used for brand new statements.
+ If the caller is modifying an existing statement, it should use the
+ combination push_stmt_changes/pop_stmt_changes. */
void
-add_referenced_tmp_var (tree var)
+mark_symbols_for_renaming (gimple stmt)
{
- add_referenced_var (var, NULL);
+ tree op;
+ ssa_op_iter iter;
+
+ update_stmt (stmt);
+
+ /* Mark all the operands for renaming. */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_ALL_OPERANDS)
+ if (DECL_P (op))
+ mark_sym_for_renaming (op);
+}
+
+
+/* Find all variables within the gimplified statement that were not
+ previously visible to the function and add them to the referenced
+ variables list. */
+
+static tree
+find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
+ void *data ATTRIBUTE_UNUSED)
+{
+ tree t = *tp;
+
+ if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
+ {
+ add_referenced_var (t);
+ mark_sym_for_renaming (t);
+ }
+
+ if (IS_TYPE_OR_DECL_P (t))
+ *walk_subtrees = 0;
+
+ return NULL;
}
-/* Add all the non-SSA variables found in STMT's operands to the bitmap
- VARS_TO_RENAME. */
+/* Find any new referenced variables in STMT. */
void
-mark_new_vars_to_rename (tree stmt, bitmap vars_to_rename)
+find_new_referenced_vars (gimple stmt)
{
- ssa_op_iter iter;
- tree val;
- bitmap vars_in_vops_to_rename;
- bool found_exposed_symbol = false;
- int v_may_defs_before, v_may_defs_after;
- int v_must_defs_before, v_must_defs_after;
-
- vars_in_vops_to_rename = BITMAP_XMALLOC ();
-
- /* Before re-scanning the statement for operands, mark the existing
- virtual operands to be renamed again. We do this because when new
- symbols are exposed, the virtual operands that were here before due to
- aliasing will probably be removed by the call to get_stmt_operand.
- Therefore, we need to flag them to be renamed beforehand.
-
- We flag them in a separate bitmap because we don't really want to
- rename them if there are not any newly exposed symbols in the
- statement operands. */
- v_may_defs_before = NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt));
- v_must_defs_before = NUM_V_MUST_DEFS (STMT_V_MUST_DEF_OPS (stmt));
-
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter,
- SSA_OP_VMAYDEF | SSA_OP_VUSE | SSA_OP_VMUSTDEF)
+ walk_gimple_op (stmt, find_new_referenced_vars_1, NULL);
+}
+
+
+/* If EXP is a handled component reference for a structure, return the
+ base variable. The access range is delimited by bit positions *POFFSET and
+ *POFFSET + *PMAX_SIZE. The access size is *PSIZE bits. If either
+ *PSIZE or *PMAX_SIZE is -1, they could not be determined. If *PSIZE
+ and *PMAX_SIZE are equal, the access is non-variable. */
+
+tree
+get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
+ HOST_WIDE_INT *psize,
+ HOST_WIDE_INT *pmax_size)
+{
+ HOST_WIDE_INT bitsize = -1;
+ HOST_WIDE_INT maxsize = -1;
+ tree size_tree = NULL_TREE;
+ HOST_WIDE_INT bit_offset = 0;
+ bool seen_variable_array_ref = false;
+
+ /* Plain variables are handled by the callers, not here. */
+ gcc_assert (!SSA_VAR_P (exp));
+
+ /* First get the final access size from just the outermost expression. */
+ if (TREE_CODE (exp) == COMPONENT_REF)
+ size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
+ else if (TREE_CODE (exp) == BIT_FIELD_REF)
+ size_tree = TREE_OPERAND (exp, 1);
+ else
+ {
+ enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ if (mode == BLKmode)
+ size_tree = TYPE_SIZE (TREE_TYPE (exp));
+ else
+ bitsize = GET_MODE_BITSIZE (mode);
+ }
+ if (size_tree != NULL_TREE)
{
- if (!DECL_P (val))
- val = SSA_NAME_VAR (val);
- bitmap_set_bit (vars_in_vops_to_rename, var_ann (val)->uid);
+ if (! host_integerp (size_tree, 1))
+ bitsize = -1;
+ else
+ bitsize = TREE_INT_CST_LOW (size_tree);
}
- /* Now force an operand re-scan on the statement and mark any newly
- exposed variables. */
- modify_stmt (stmt);
- get_stmt_operands (stmt);
+ /* Initially, maxsize is the same as the accessed element size.
+ In the following it will only grow (or become -1). */
+ maxsize = bitsize;
- v_may_defs_after = NUM_V_MAY_DEFS (STMT_V_MAY_DEF_OPS (stmt));
- v_must_defs_after = NUM_V_MUST_DEFS (STMT_V_MUST_DEF_OPS (stmt));
+ /* Compute cumulative bit-offset for nested component-refs and array-refs,
+ and find the ultimate containing object. */
+ while (1)
+ {
+ switch (TREE_CODE (exp))
+ {
+ case BIT_FIELD_REF:
+ /* Operand 2 of a BIT_FIELD_REF is the position of the first
+ referenced bit. */
+ bit_offset += tree_low_cst (TREE_OPERAND (exp, 2), 0);
+ break;
- FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_ALL_OPERANDS)
+ case COMPONENT_REF:
+ {
+ tree field = TREE_OPERAND (exp, 1);
+ tree this_offset = component_ref_field_offset (exp);
+
+ if (this_offset && TREE_CODE (this_offset) == INTEGER_CST)
+ {
+ HOST_WIDE_INT hthis_offset = tree_low_cst (this_offset, 0);
+
+ hthis_offset *= BITS_PER_UNIT;
+ bit_offset += hthis_offset;
+ bit_offset += tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
+ }
+ else
+ {
+ tree csize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ /* We need to adjust maxsize to the whole structure bitsize.
+ But we can subtract any constant offset seen so far,
+ because that would get us out of the structure otherwise. */
+ if (maxsize != -1 && csize && host_integerp (csize, 1))
+ maxsize = TREE_INT_CST_LOW (csize) - bit_offset;
+ else
+ maxsize = -1;
+ }
+ }
+ break;
+
+ case ARRAY_REF:
+ case ARRAY_RANGE_REF:
+ {
+ tree index = TREE_OPERAND (exp, 1);
+ tree low_bound = array_ref_low_bound (exp);
+ tree unit_size = array_ref_element_size (exp);
+
+ /* If the resulting bit-offset is constant, track it. */
+ if (host_integerp (index, 0)
+ && host_integerp (low_bound, 0)
+ && host_integerp (unit_size, 1))
+ {
+ HOST_WIDE_INT hindex = tree_low_cst (index, 0);
+
+ hindex -= tree_low_cst (low_bound, 0);
+ hindex *= tree_low_cst (unit_size, 1);
+ hindex *= BITS_PER_UNIT;
+ bit_offset += hindex;
+
+ /* An array ref with a constant index up in the structure
+ hierarchy will constrain the size of any variable array ref
+ lower in the access hierarchy. */
+ seen_variable_array_ref = false;
+ }
+ else
+ {
+ tree asize = TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ /* We need to adjust maxsize to the whole array bitsize.
+ But we can subtract any constant offset seen so far,
+ because that would get us outside of the array otherwise. */
+ if (maxsize != -1 && asize && host_integerp (asize, 1))
+ maxsize = TREE_INT_CST_LOW (asize) - bit_offset;
+ else
+ maxsize = -1;
+
+ /* Remember that we have seen an array ref with a variable
+ index. */
+ seen_variable_array_ref = true;
+ }
+ }
+ break;
+
+ case REALPART_EXPR:
+ /* The real part lives at offset zero of the complex value,
+ so there is nothing to add. */
+ break;
+
+ case IMAGPART_EXPR:
+ /* The imaginary part follows the real part; skip over one
+ element size. */
+ bit_offset += bitsize;
+ break;
+
+ case VIEW_CONVERT_EXPR:
+ /* ??? We probably should give up here and bail out. */
+ break;
+
+ default:
+ goto done;
+ }
+
+ exp = TREE_OPERAND (exp, 0);
+ }
+ done:
+
+ /* We need to deal with variable arrays ending structures such as
+ struct { int length; int a[1]; } x; x.a[d]
+ struct { struct { int a; int b; } a[1]; } x; x.a[d].a
+ struct { struct { int a[1]; } a[1]; } x; x.a[0][d], x.a[d][0]
+ where we do not know maxsize for variable index accesses to
+ the array. The simplest way to conservatively deal with this
+ is to punt in the case that offset + maxsize reaches the
+ base type boundary. */
+ if (seen_variable_array_ref
+ && maxsize != -1
+ && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
+ && bit_offset + maxsize
+ == (signed)TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))))
+ maxsize = -1;
+
+ /* ??? Due to negative offsets in ARRAY_REF we can end up with
+ negative bit_offset here. We might want to store a zero offset
+ in this case. */
+ *poffset = bit_offset;
+ *psize = bitsize;
+ *pmax_size = maxsize;
+
+ return exp;
+}
+
+/* Returns true if STMT references an SSA_NAME that has
+ SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false. */
+
+bool
+stmt_references_abnormal_ssa_name (gimple stmt)
+{
+ ssa_op_iter oi;
+ use_operand_p use_p;
+
+ /* NOTE(review): only SSA_OP_USE operands are scanned here; def
+ operands are not inspected -- confirm that matches callers. */
+ FOR_EACH_SSA_USE_OPERAND (use_p, stmt, oi, SSA_OP_USE)
{
- if (DECL_P (val))
+ if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (USE_FROM_PTR (use_p)))
+ return true;
+ }
+
+ return false;
+}
+
+/* Return true, if the two memory references REF1 and REF2 may alias. */
+
+bool
+refs_may_alias_p (tree ref1, tree ref2)
+{
+ tree base1, base2;
+ HOST_WIDE_INT offset1 = 0, offset2 = 0;
+ HOST_WIDE_INT size1 = -1, size2 = -1;
+ HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
+ bool strict_aliasing_applies;
+
+ /* Both references must be decomposable memory references. */
+ gcc_assert ((SSA_VAR_P (ref1)
+ || handled_component_p (ref1)
+ || INDIRECT_REF_P (ref1)
+ || TREE_CODE (ref1) == TARGET_MEM_REF)
+ && (SSA_VAR_P (ref2)
+ || handled_component_p (ref2)
+ || INDIRECT_REF_P (ref2)
+ || TREE_CODE (ref2) == TARGET_MEM_REF));
+
+ /* Defer to TBAA if possible. */
+ if (flag_strict_aliasing
+ && !alias_sets_conflict_p (get_alias_set (ref1), get_alias_set (ref2)))
+ return false;
+
+ /* Decompose the references into their base objects and the access. */
+ base1 = ref1;
+ if (handled_component_p (ref1))
+ base1 = get_ref_base_and_extent (ref1, &offset1, &size1, &max_size1);
+ base2 = ref2;
+ if (handled_component_p (ref2))
+ base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &max_size2);
+
+ /* If both references are based on different variables, they cannot alias.
+ If both references are based on the same variable, they cannot alias if
+ the accesses do not overlap. */
+ if (SSA_VAR_P (base1)
+ && SSA_VAR_P (base2))
+ {
+ if (!operand_equal_p (base1, base2, 0))
+ return false;
+ return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+ }
+
+ /* If one base is a ref-all pointer weird things are allowed. */
+ strict_aliasing_applies = (flag_strict_aliasing
+ && (!INDIRECT_REF_P (base1)
+ || get_alias_set (base1) != 0)
+ && (!INDIRECT_REF_P (base2)
+ || get_alias_set (base2) != 0));
+
+ /* If strict aliasing applies the only way to access a scalar variable
+ is through a pointer dereference or through a union (gcc extension). */
+ if (strict_aliasing_applies
+ && ((SSA_VAR_P (ref2)
+ && !AGGREGATE_TYPE_P (TREE_TYPE (ref2))
+ && !INDIRECT_REF_P (ref1)
+ && TREE_CODE (TREE_TYPE (base1)) != UNION_TYPE)
+ || (SSA_VAR_P (ref1)
+ && !AGGREGATE_TYPE_P (TREE_TYPE (ref1))
+ && !INDIRECT_REF_P (ref2)
+ && TREE_CODE (TREE_TYPE (base2)) != UNION_TYPE)))
+ return false;
+
+ /* If both references are through the same type, or, when strict aliasing
+ does not apply, through the same pointer, they do not alias
+ if the accesses do not overlap. */
+ if ((strict_aliasing_applies
+ && (TYPE_MAIN_VARIANT (TREE_TYPE (base1))
+ == TYPE_MAIN_VARIANT (TREE_TYPE (base2))))
+ || (TREE_CODE (base1) == INDIRECT_REF
+ && TREE_CODE (base2) == INDIRECT_REF
+ && operand_equal_p (TREE_OPERAND (base1, 0),
+ TREE_OPERAND (base2, 0), 0)))
+ return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+
+ /* If both are component references through pointers try to find a
+ common base and apply offset based disambiguation. This handles
+ for example
+ struct A { int i; int j; } *q;
+ struct B { struct A a; int k; } *p;
+ disambiguating q->i and p->a.j. */
+ if (strict_aliasing_applies
+ && (TREE_CODE (base1) == INDIRECT_REF
+ || TREE_CODE (base2) == INDIRECT_REF)
+ && handled_component_p (ref1)
+ && handled_component_p (ref2))
+ {
+ tree *refp;
+ /* Now search for the type of base1 in the access path of ref2. This
+ would be a common base for doing offset based disambiguation on. */
+ refp = &ref2;
+ while (handled_component_p (*refp)
+ /* Note that the following is only conservative if there are
+ never copies of types appearing as sub-structures. */
+ && (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
+ != TYPE_MAIN_VARIANT (TREE_TYPE (base1))))
+ refp = &TREE_OPERAND (*refp, 0);
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
+ == TYPE_MAIN_VARIANT (TREE_TYPE (base1)))
+ {
+ HOST_WIDE_INT offadj, sztmp, msztmp;
+ get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
+ offset2 -= offadj;
+ return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+ }
+ /* The other way around. */
+ refp = &ref1;
+ while (handled_component_p (*refp)
+ && (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
+ != TYPE_MAIN_VARIANT (TREE_TYPE (base2))))
+ refp = &TREE_OPERAND (*refp, 0);
+ if (TYPE_MAIN_VARIANT (TREE_TYPE (*refp))
+ == TYPE_MAIN_VARIANT (TREE_TYPE (base2)))
{
- found_exposed_symbol = true;
- bitmap_set_bit (vars_to_rename, var_ann (val)->uid);
+ HOST_WIDE_INT offadj, sztmp, msztmp;
+ get_ref_base_and_extent (*refp, &offadj, &sztmp, &msztmp);
+ offset1 -= offadj;
+ return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
}
+ /* If we can be sure to catch all equivalent types in the search
+ for the common base then we could return false here. In that
+ case we would be able to disambiguate q->i and p->k. */
}
- /* If we found any newly exposed symbols, or if there are fewer VDEF
- operands in the statement, add the variables we had set in
- VARS_IN_VOPS_TO_RENAME to VARS_TO_RENAME. We need to check for
- vanishing VDEFs because in those cases, the names that were formerly
- generated by this statement are not going to be available anymore. */
- if (found_exposed_symbol
- || v_may_defs_before > v_may_defs_after
- || v_must_defs_before > v_must_defs_after)
- bitmap_ior_into (vars_to_rename, vars_in_vops_to_rename);
-
- BITMAP_XFREE (vars_in_vops_to_rename);
+ /* Conservative default: assume the references may alias. */
+ return true;
}
-/* Find all variables within the gimplified statement that were not previously
- visible to the function and add them to the referenced variables list. */
+/* Given a stmt STMT that references memory, return the single stmt
+ that is reached by following the VUSE -> VDEF link. Returns
+ NULL, if there is no single stmt that defines all VUSEs of
+ STMT.
+ Note that for a stmt with a single virtual operand this may return
+ a PHI node as well. Note that if all VUSEs are default definitions
+ this function will return an empty statement. */
-static tree
-find_new_referenced_vars_1 (tree *tp, int *walk_subtrees,
- void *data ATTRIBUTE_UNUSED)
+gimple
+get_single_def_stmt (gimple stmt)
{
- tree t = *tp;
+ gimple def_stmt = NULL;
+ tree use;
+ ssa_op_iter iter;
- if (TREE_CODE (t) == VAR_DECL && !var_ann (t))
- add_referenced_tmp_var (t);
+ FOR_EACH_SSA_TREE_OPERAND (use, stmt, iter, SSA_OP_VIRTUAL_USES)
+ {
+ gimple tmp = SSA_NAME_DEF_STMT (use);
+
+ /* ??? This is too simplistic for multiple virtual operands
+ reaching different PHI nodes of the same basic blocks or for
+ reaching all default definitions. */
+ /* Two distinct defining statements are tolerated only if both
+ are GIMPLE_NOPs, i.e. default definitions. */
+ if (def_stmt
+ && def_stmt != tmp
+ && !(gimple_nop_p (def_stmt)
+ && gimple_nop_p (tmp)))
+ return NULL;
+
+ def_stmt = tmp;
+ }
- if (IS_TYPE_OR_DECL_P (t))
- *walk_subtrees = 0;
+ return def_stmt;
+}
- return NULL;
+/* Given a PHI node of virtual operands, tries to eliminate cyclic
+ reached definitions if they do not alias REF and returns the
+ defining statement of the single virtual operand that flows in
+ from a non-backedge. Returns NULL if such statement within
+ the above conditions cannot be found.
+ NOTE(review): relies on EDGE_DFS_BACK flags being up to date --
+ confirm callers have computed DFS back edges beforehand. */
+
+gimple
+get_single_def_stmt_from_phi (tree ref, gimple phi)
+{
+ tree def_arg = NULL_TREE;
+ unsigned i;
+
+ /* Find the single PHI argument that is not flowing in from a
+ back edge and verify that the loop-carried definitions do
+ not alias the reference we look for. */
+ for (i = 0; i < gimple_phi_num_args (phi); ++i)
+ {
+ tree arg = PHI_ARG_DEF (phi, i);
+ gimple def_stmt;
+
+ if (!(gimple_phi_arg_edge (phi, i)->flags & EDGE_DFS_BACK))
+ {
+ /* Multiple non-back edges? Do not try to handle this. */
+ if (def_arg)
+ return NULL;
+ def_arg = arg;
+ continue;
+ }
+
+ /* Follow the definitions back to the original PHI node. Bail
+ out once a definition is found that may alias REF. */
+ def_stmt = SSA_NAME_DEF_STMT (arg);
+ do
+ {
+ if (!is_gimple_assign (def_stmt)
+ || refs_may_alias_p (ref, gimple_assign_lhs (def_stmt)))
+ return NULL;
+ /* ??? This will only work, reaching the PHI node again if
+ there is a single virtual operand on def_stmt. */
+ def_stmt = get_single_def_stmt (def_stmt);
+ if (!def_stmt)
+ return NULL;
+ }
+ while (def_stmt != phi);
+ }
+
+ /* NOTE(review): if every incoming edge were marked as a back edge,
+ DEF_ARG would still be NULL_TREE here -- presumably at least one
+ forward edge always exists; verify. */
+ return SSA_NAME_DEF_STMT (def_arg);
}
-void
-find_new_referenced_vars (tree *stmt_p)
+/* Return the single reference statement defining all virtual uses
+ on STMT or NULL, if there are multiple defining statements.
+ Take into account only definitions that alias REF if following
+ back-edges when looking through a loop PHI node. */
+
+gimple
+get_single_def_stmt_with_phi (tree ref, gimple stmt)
{
- walk_tree (stmt_p, find_new_referenced_vars_1, NULL, NULL);
+ switch (NUM_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
+ {
+ case 0:
+ /* Callers must only pass statements that have virtual uses. */
+ gcc_unreachable ();
+
+ case 1:
+ {
+ gimple def_stmt = SSA_NAME_DEF_STMT (SINGLE_SSA_TREE_OPERAND
+ (stmt, SSA_OP_VIRTUAL_USES));
+ /* We can handle lookups over PHI nodes only for a single
+ virtual operand. */
+ if (gimple_code (def_stmt) == GIMPLE_PHI)
+ return get_single_def_stmt_from_phi (ref, def_stmt);
+ return def_stmt;
+ }
+
+ default:
+ return get_single_def_stmt (stmt);
+ }
}