You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
-#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
-
+#include "toplev.h"
#include "langhooks.h"
/* This file contains the code required to manage the operands cache of the
static GTY (()) struct ssa_operand_memory_d *operand_memory = NULL;
static unsigned operand_memory_index;
-static void note_addressable (tree, stmt_ann_t);
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
+static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
if (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
lhs = TREE_OPERAND (lhs, 0);
- if (TREE_CODE (lhs) != ARRAY_REF && TREE_CODE (lhs) != ARRAY_RANGE_REF
+ if (TREE_CODE (lhs) != ARRAY_REF
+ && TREE_CODE (lhs) != ARRAY_RANGE_REF
&& TREE_CODE (lhs) != BIT_FIELD_REF
&& TREE_CODE (lhs) != REALPART_EXPR
&& TREE_CODE (lhs) != IMAGPART_EXPR)
/* Free any operands vectors in OPS. */
-#if 0
-static void
+void
free_ssa_operands (stmt_operands_p ops)
{
ops->def_ops = NULL;
ops->maydef_ops = NULL;
ops->mustdef_ops = NULL;
ops->vuse_ops = NULL;
- while (ops->memory.next != NULL)
- {
- operand_memory_p tmp = ops->memory.next;
- ops->memory.next = tmp->next;
- ggc_free (tmp);
- }
}
-#endif
/* Get the operands of statement STMT. Note that repeated calls to
get_indirect_ref_operands (stmt, expr, flags);
return;
+ case TARGET_MEM_REF:
+ get_tmr_operands (stmt, expr, flags);
+ return;
+
case ARRAY_REF:
case ARRAY_RANGE_REF:
/* Treat array references as references to the virtual variable
case IMAGPART_EXPR:
{
tree ref;
- HOST_WIDE_INT offset, size;
+ unsigned HOST_WIDE_INT offset, size;
/* This component ref becomes an access to all of the subvariables
it can touch, if we can determine that, but *NOT* the real one.
If we can't determine which fields we could touch, the recursion
bool exact;
if (overlap_subvar (offset, size, sv, &exact))
{
+ int subvar_flags = flags;
if (!exact)
- flags &= ~opf_kill_def;
- add_stmt_operand (&sv->var, s_ann, flags);
+ subvar_flags &= ~opf_kill_def;
+ add_stmt_operand (&sv->var, s_ann, subvar_flags);
}
}
}
flags & ~opf_kill_def);
if (code == COMPONENT_REF)
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ {
+ if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+ s_ann->has_volatile_ops = true;
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ }
return;
}
case WITH_SIZE_EXPR:
if (!allows_reg && allows_mem)
{
tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- note_addressable (t, s_ann);
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
}
get_expr_operands (stmt, &TREE_VALUE (link), opf_is_def);
if (!allows_reg && allows_mem)
{
tree t = get_base_address (TREE_VALUE (link));
- if (t && DECL_P (t))
- note_addressable (t, s_ann);
+ if (t && DECL_P (t) && s_ann)
+ add_to_addressable_set (t, &s_ann->addresses_taken);
}
get_expr_operands (stmt, &TREE_VALUE (link), 0);
add_stmt_operand (&global_var, s_ann, opf_is_def);
else
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
- }
+ {
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
+ }
/* Now clobber all addressables. */
EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
get_expr_operands (stmt, pptr, opf_none);
}
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF. */
+
+static void
+get_tmr_operands (tree stmt, tree expr, int flags)
+{
+ tree tag = TMR_TAG (expr);
+
+ /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
+
+ /* MEM_REFs should never be killing. */
+ flags &= ~opf_kill_def;
+
+ if (TMR_SYMBOL (expr))
+ {
+ stmt_ann_t ann = stmt_ann (stmt);
+ add_to_addressable_set (TMR_SYMBOL (expr), &ann->addresses_taken);
+ }
+
+ if (tag)
+ add_stmt_operand (&tag, stmt_ann (stmt), flags);
+ else
+ /* Something weird, so ensure that we will be careful. */
+ stmt_ann (stmt)->has_volatile_ops = true;
+}
+
/* A subroutine of get_expr_operands to handle CALL_EXPR. */
static void
/* If the operand is an ADDR_EXPR, add its operand to the list of
variables that have had their address taken in this statement. */
- if (TREE_CODE (var) == ADDR_EXPR)
+ if (TREE_CODE (var) == ADDR_EXPR && s_ann)
{
- note_addressable (TREE_OPERAND (var, 0), s_ann);
+ add_to_addressable_set (TREE_OPERAND (var, 0), &s_ann->addresses_taken);
return;
}
/* If the variable cannot be modified and this is a V_MAY_DEF change
it into a VUSE. This happens when read-only variables are marked
- call-clobbered and/or aliased to writeable variables. So we only
+ call-clobbered and/or aliased to writable variables. So we only
check that this only happens on non-specific stores.
Note that if this is a specific store, i.e. associated with a
if (flags & opf_is_def)
{
- bool added_may_defs_p = false;
-
/* If the variable is also an alias tag, add a virtual
operand for it, otherwise we will miss representing
references to the members of the variable's alias set.
This fixes the bug in gcc.c-torture/execute/20020503-1.c. */
if (v_ann->is_alias_tag)
- {
- added_may_defs_p = true;
- append_v_may_def (var);
- }
+ append_v_may_def (var);
for (i = 0; i < VARRAY_ACTIVE_SIZE (aliases); i++)
- {
- /* While VAR may be modifiable, some of its aliases
- may not be. If that's the case, we don't really
- need to add them a V_MAY_DEF for them. */
- tree alias = VARRAY_TREE (aliases, i);
-
- if (unmodifiable_var_p (alias))
- append_vuse (alias);
- else
- {
- append_v_may_def (alias);
- added_may_defs_p = true;
- }
- }
+ append_v_may_def (VARRAY_TREE (aliases, i));
- if (s_ann && added_may_defs_p)
+ if (s_ann)
s_ann->makes_aliased_stores = 1;
}
else
}
-/* Record that VAR had its address taken in the statement with annotations
- S_ANN. */
+/* Add the base address of REF to the set *ADDRESSES_TAKEN. If
+ *ADDRESSES_TAKEN is NULL, a new set is created. REF may be
+ a single variable whose address has been taken or any other valid
+ GIMPLE memory reference (structure reference, array, etc). If the
+ base address of REF is a decl that has sub-variables, also add all
+ of its sub-variables. */
-static void
-note_addressable (tree var, stmt_ann_t s_ann)
+void
+add_to_addressable_set (tree ref, bitmap *addresses_taken)
{
+ tree var;
subvar_t svars;
- if (!s_ann)
- return;
-
+ gcc_assert (addresses_taken);
+
/* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
- as the only thing we take the address of.
- See PR 21407 and the ensuing mailing list discussion. */
-
- var = get_base_address (var);
+ as the only thing we take the address of. If VAR is a structure,
+ taking the address of a field means that the whole structure may
+ be referenced using pointer arithmetic. See PR 21407 and the
+ ensuing mailing list discussion. */
+ var = get_base_address (ref);
if (var && SSA_VAR_P (var))
{
- if (s_ann->addresses_taken == NULL)
- s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
+ if (*addresses_taken == NULL)
+ *addresses_taken = BITMAP_GGC_ALLOC ();
-
if (var_can_have_subvars (var)
&& (svars = get_subvars_for_var (var)))
{
subvar_t sv;
for (sv = svars; sv; sv = sv->next)
- bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
+ {
+ bitmap_set_bit (*addresses_taken, DECL_UID (sv->var));
+ TREE_ADDRESSABLE (sv->var) = 1;
+ }
}
else
- bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
+ {
+ bitmap_set_bit (*addresses_taken, DECL_UID (var));
+ TREE_ADDRESSABLE (var) = 1;
+ }
}
}
+
/* Add clobbering definitions for .GLOBAL_VAR or for each of the call
clobbered variables in the function. */