You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING. If not, write to
-the Free Software Foundation, 59 Temple Place - Suite 330,
-Boston, MA 02111-1307, USA. */
+the Free Software Foundation, 51 Franklin Street, Fifth Floor,
+Boston, MA 02110-1301, USA. */
#include "config.h"
#include "system.h"
#include "flags.h"
#include "function.h"
#include "diagnostic.h"
-#include "errors.h"
#include "tree-flow.h"
#include "tree-inline.h"
#include "tree-pass.h"
#include "ggc.h"
#include "timevar.h"
+#include "toplev.h"
#include "langhooks.h"
VUSE for 'b'. */
#define opf_no_vops (1 << 2)
+/* Operand is a "non-specific" kill for call-clobbers and such. This is used
+ to distinguish "reset the world" events from explicit MODIFY_EXPRs. */
+#define opf_non_specific (1 << 3)
+
/* This structure maintain a sorted list of operands which is created by
parse_ssa_operand. */
struct opbuild_list_d GTY (())
static void get_expr_operands (tree, tree *, int);
static void get_asm_expr_operands (tree);
static void get_indirect_ref_operands (tree, tree, int);
+static void get_tmr_operands (tree, tree, int);
static void get_call_expr_operands (tree, tree);
static inline void append_def (tree *);
static inline void append_use (tree *);
/* Free any operands vectors in OPS. */
-#if 0
-static void
+void
free_ssa_operands (stmt_operands_p ops)
{
+  /* NOTE(review): this now only detaches the per-statement operand
+     caches.  The GC-allocated chain hanging off ops->memory is no
+     longer walked and ggc_free'd as the old #if 0 version did --
+     presumably the garbage collector reclaims it once unreferenced.
+     TODO: confirm against the ggc allocation/lifetime rules.  */
ops->def_ops = NULL;
ops->maydef_ops = NULL;
ops->mustdef_ops = NULL;
ops->vuse_ops = NULL;
- while (ops->memory.next != NULL)
- {
- operand_memory_p tmp = ops->memory.next;
- ops->memory.next = tmp->next;
- ggc_free (tmp);
- }
}
-#endif
/* Get the operands of statement STMT. Note that repeated calls to
get_indirect_ref_operands (stmt, expr, flags);
return;
+ case TARGET_MEM_REF:
+ get_tmr_operands (stmt, expr, flags);
+ return;
+
case ARRAY_REF:
case ARRAY_RANGE_REF:
/* Treat array references as references to the virtual variable
bool exact;
if (overlap_subvar (offset, size, sv, &exact))
{
- if (exact)
+ if (!exact)
flags &= ~opf_kill_def;
add_stmt_operand (&sv->var, s_ann, flags);
}
flags & ~opf_kill_def);
if (code == COMPONENT_REF)
- get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ {
+ if (s_ann && TREE_THIS_VOLATILE (TREE_OPERAND (expr, 1)))
+ s_ann->has_volatile_ops = true;
+ get_expr_operands (stmt, &TREE_OPERAND (expr, 2), opf_none);
+ }
return;
}
case WITH_SIZE_EXPR:
add_stmt_operand (&global_var, s_ann, opf_is_def);
else
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, i, bi)
- {
- tree var = referenced_var (i);
- add_stmt_operand (&var, s_ann, opf_is_def);
- }
+ {
+ tree var = referenced_var (i);
+ add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
+ }
/* Now clobber all addressables. */
EXECUTE_IF_SET_IN_BITMAP (addressable_vars, 0, i, bi)
&& get_subvars_for_var (var) != NULL)
continue;
- add_stmt_operand (&var, s_ann, opf_is_def);
+ add_stmt_operand (&var, s_ann, opf_is_def | opf_non_specific);
}
break;
get_expr_operands (stmt, pptr, opf_none);
}
+/* A subroutine of get_expr_operands to handle TARGET_MEM_REF.
+
+   STMT is the statement being scanned; EXPR is the TARGET_MEM_REF
+   tree; FLAGS carries the opf_* bits inherited from the caller.
+   The base and index sub-expressions are recorded as plain (opf_none)
+   uses, while the memory access itself is represented by the TMR's
+   tag variable.  opf_kill_def is always stripped, so a TMR store is
+   never treated as a killing definition.  */
+
+static void
+get_tmr_operands (tree stmt, tree expr, int flags)
+{
+  tree tag = TMR_TAG (expr);
+
+  /* First record the real operands. */
+ get_expr_operands (stmt, &TMR_BASE (expr), opf_none);
+ get_expr_operands (stmt, &TMR_INDEX (expr), opf_none);
+
+  /* MEM_REFs should never be killing. */
+  flags &= ~opf_kill_def;
+
+  if (TMR_SYMBOL (expr))
+    note_addressable (TMR_SYMBOL (expr), stmt_ann (stmt));
+
+  if (tag)
+    add_stmt_operand (&tag, stmt_ann (stmt), flags);
+  else
+    /* No tag to stand in for the memory access -- presumably an
+       un-aliased or not-yet-analyzed reference.  Mark the statement
+       as having volatile operands so the optimizers stay careful.  */
+    stmt_ann (stmt)->has_volatile_ops = true;
+}
+
/* A subroutine of get_expr_operands to handle CALL_EXPR. */
static void
/* If the variable cannot be modified and this is a V_MAY_DEF change
it into a VUSE. This happens when read-only variables are marked
- call-clobbered and/or aliased to writeable variables. So we only
- check that this only happens on stores, and not writes to GIMPLE
- registers.
-
- FIXME: The C++ FE is emitting assignments in the IL stream for
- read-only globals. This is wrong, but for the time being disable
- this transformation on V_MUST_DEF operands (otherwise, we
- mis-optimize SPEC2000's eon). */
- if ((flags & opf_is_def)
- && !(flags & opf_kill_def)
- && unmodifiable_var_p (var))
+ call-clobbered and/or aliased to writable variables. So we only
+ check that this only happens on non-specific stores.
+
+ Note that if this is a specific store, i.e. associated with a
+ modify_expr, then we can't suppress the V_DEF, lest we run into
+ validation problems.
+
+ This can happen when programs cast away const, leaving us with a
+ store to read-only memory. If the statement is actually executed
+ at runtime, then the program is ill formed. If the statement is
+ not executed then all is well. At the very least, we cannot ICE. */
+ if ((flags & opf_non_specific) && unmodifiable_var_p (var))
{
gcc_assert (!is_real_op);
- flags &= ~opf_is_def;
+ flags &= ~(opf_is_def | opf_kill_def);
}
if (is_real_op)
static void
note_addressable (tree var, stmt_ann_t s_ann)
{
- tree ref;
subvar_t svars;
- HOST_WIDE_INT offset;
- HOST_WIDE_INT size;
if (!s_ann)
return;
- /* If this is a COMPONENT_REF, and we know exactly what it touches, we only
- take the address of the subvariables it will touch.
- Otherwise, we take the address of all the subvariables, plus the real
- ones. */
-
- if (var && TREE_CODE (var) == COMPONENT_REF
- && (ref = okay_component_ref_for_subvars (var, &offset, &size)))
- {
- subvar_t sv;
- svars = get_subvars_for_var (ref);
-
- if (s_ann->addresses_taken == NULL)
- s_ann->addresses_taken = BITMAP_GGC_ALLOC ();
-
- for (sv = svars; sv; sv = sv->next)
- {
- if (overlap_subvar (offset, size, sv, NULL))
- bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
- }
- return;
- }
+ /* Note that it is *NOT OKAY* to use the target of a COMPONENT_REF
+ as the only thing we take the address of.
+ See PR 21407 and the ensuing mailing list discussion. */
var = get_base_address (var);
if (var && SSA_VAR_P (var))
{
subvar_t sv;
for (sv = svars; sv; sv = sv->next)
- bitmap_set_bit (s_ann->addresses_taken, var_ann (sv->var)->uid);
+ bitmap_set_bit (s_ann->addresses_taken, DECL_UID (sv->var));
}
else
- bitmap_set_bit (s_ann->addresses_taken, var_ann (var)->uid);
+ bitmap_set_bit (s_ann->addresses_taken, DECL_UID (var));
}
}
if (unmodifiable_var_p (var))
add_stmt_operand (&var, &empty_ann, opf_none);
else
- add_stmt_operand (&var, &empty_ann, opf_is_def);
+ add_stmt_operand (&var, &empty_ann, opf_is_def | opf_non_specific);
}
clobbered_aliased_loads = empty_ann.makes_aliased_loads;
EXECUTE_IF_SET_IN_BITMAP (call_clobbered_vars, 0, u, bi)
{
tree var = referenced_var (u);
- add_stmt_operand (&var, &empty_ann, opf_none);
+ add_stmt_operand (&var, &empty_ann, opf_none | opf_non_specific);
}
ro_call_aliased_loads = empty_ann.makes_aliased_loads;