/* Inline functions for tree-flow.h
- Copyright (C) 2001, 2003 Free Software Foundation, Inc.
+ Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc.
Contributed by Diego Novillo <dnovillo@redhat.com>
This file is part of GCC.
static inline var_ann_t
var_ann (tree t)
{
-#if defined ENABLE_CHECKING
- if (t == NULL_TREE
- || !DECL_P (t)
- || (t->common.ann
- && t->common.ann->common.type != VAR_ANN))
- abort ();
-#endif
+ /* T must be a decl whose annotation, if present, is a variable
+ annotation. Separate asserts pinpoint which invariant failed. */
+ gcc_assert (t);
+ gcc_assert (DECL_P (t));
+ gcc_assert (!t->common.ann || t->common.ann->common.type == VAR_ANN);
return (var_ann_t) t->common.ann;
}
static inline stmt_ann_t
stmt_ann (tree t)
{
-#if defined ENABLE_CHECKING
- if (!is_gimple_stmt (t))
- abort ();
+#ifdef ENABLE_CHECKING
+ /* NOTE(review): unlike var_ann, this check stays under
+ ENABLE_CHECKING — presumably is_gimple_stmt is too costly for
+ release builds; confirm. */
+ gcc_assert (is_gimple_stmt (t));
#endif
-
return (stmt_ann_t) t->common.ann;
}
static inline basic_block
bb_for_stmt (tree t)
{
- stmt_ann_t ann = stmt_ann (t);
+ stmt_ann_t ann;
+
+ /* PHI nodes record their basic block directly rather than in a
+ statement annotation. */
+ if (TREE_CODE (t) == PHI_NODE)
+ return PHI_BB (t);
+
+ ann = stmt_ann (t);
return ann ? ann->bb : NULL;
}
return "???";
}
+/* Return true if T is a noreturn call: a statement containing a call
+ whose callee carries the ECF_NORETURN flag. */
+static inline bool
+noreturn_call_p (tree t)
+{
+ tree call = get_call_expr_in (t);
+ return call != 0 && (call_expr_flags (call) & ECF_NORETURN) != 0;
+}
+
/* Mark statement T as modified. */
static inline void
modify_stmt (tree t)
stmt_ann_t ann = stmt_ann (t);
if (ann == NULL)
ann = create_stmt_ann (t);
+ else if (noreturn_call_p (t))
+ VEC_safe_push (tree, modified_noreturn_calls, t);
ann->modified = 1;
}
static inline def_optype
get_def_ops (stmt_ann_t ann)
{
- return ann ? ann->def_ops : NULL;
+ /* NULL-safe: a statement without an annotation has no defs. */
+ return ann ? ann->operands.def_ops : NULL;
}
/* Return the uses present in ANN, a statement annotation.
static inline use_optype
get_use_ops (stmt_ann_t ann)
{
- return ann ? ann->use_ops : NULL;
+ /* NULL-safe: a statement without an annotation has no uses. */
+ return ann ? ann->operands.use_ops : NULL;
}
/* Return the virtual may-defs present in ANN, a statement
static inline v_may_def_optype
get_v_may_def_ops (stmt_ann_t ann)
{
- return ann ? ann->v_may_def_ops : NULL;
+ /* NULL-safe: a statement without an annotation has no may-defs. */
+ return ann ? ann->operands.v_may_def_ops : NULL;
}
/* Return the virtual uses present in ANN, a statement annotation.
static inline vuse_optype
get_vuse_ops (stmt_ann_t ann)
{
- return ann ? ann->vuse_ops : NULL;
+ /* NULL-safe: a statement without an annotation has no vuses. */
+ return ann ? ann->operands.vuse_ops : NULL;
}
/* Return the virtual must-defs present in ANN, a statement
static inline v_must_def_optype
get_v_must_def_ops (stmt_ann_t ann)
{
- return ann ? ann->v_must_def_ops : NULL;
+ /* NULL-safe: a statement without an annotation has no must-defs. */
+ return ann ? ann->operands.v_must_def_ops : NULL;
}
/* Return the tree pointer to by USE. */
static inline use_operand_p
get_use_op_ptr (use_optype uses, unsigned int index)
{
-#ifdef ENABLE_CHECKING
- if (index >= uses->num_uses)
- abort();
-#endif
+ /* The range check is now unconditional, not just with checking
+ enabled. */
+ gcc_assert (index < uses->num_uses);
return uses->uses[index];
}
static inline def_operand_p
get_def_op_ptr (def_optype defs, unsigned int index)
{
-#ifdef ENABLE_CHECKING
- if (index >= defs->num_defs)
- abort();
-#endif
+ /* The range check is now unconditional, not just with checking
+ enabled. */
+ gcc_assert (index < defs->num_defs);
return defs->defs[index];
}
get_v_may_def_result_ptr(v_may_def_optype v_may_defs, unsigned int index)
{
def_operand_p op;
-#ifdef ENABLE_CHECKING
- if (index >= v_may_defs->num_v_may_defs)
- abort();
-#endif
- op.def = &(v_may_defs->v_may_defs[index * 2]);
+ gcc_assert (index < v_may_defs->num_v_may_defs);
+ op.def = &(v_may_defs->v_may_defs[index].def);
return op;
}
get_v_may_def_op_ptr(v_may_def_optype v_may_defs, unsigned int index)
{
use_operand_p op;
-#ifdef ENABLE_CHECKING
- if (index >= v_may_defs->num_v_may_defs)
- abort();
-#endif
- op.use = &(v_may_defs->v_may_defs[index * 2 + 1]);
+ gcc_assert (index < v_may_defs->num_v_may_defs);
+ op.use = &(v_may_defs->v_may_defs[index].use);
return op;
}
get_vuse_op_ptr(vuse_optype vuses, unsigned int index)
{
use_operand_p op;
-#ifdef ENABLE_CHECKING
- if (index >= vuses->num_vuses)
- abort();
-#endif
+ gcc_assert (index < vuses->num_vuses);
op.use = &(vuses->vuses[index]);
return op;
}
-/* Return a def_operand_p that is the V_MUST_DEF_OP for the
+/* Return a def_operand_p that is the V_MUST_DEF_RESULT for the
V_MUST_DEF at INDEX in the V_MUST_DEFS array. */
static inline def_operand_p
-get_v_must_def_op_ptr (v_must_def_optype v_must_defs, unsigned int index)
+get_v_must_def_result_ptr (v_must_def_optype v_must_defs, unsigned int index)
{
def_operand_p op;
+ /* The result is the def half of the must-def operand pair. */
-#ifdef ENABLE_CHECKING
- if (index >= v_must_defs->num_v_must_defs)
- abort();
-#endif
- op.def = &(v_must_defs->v_must_defs[index]);
+ gcc_assert (index < v_must_defs->num_v_must_defs);
+ op.def = &(v_must_defs->v_must_defs[index].def);
+ return op;
+}
+
+/* Return a use_operand_p that is the V_MUST_DEF_KILL for the
+ V_MUST_DEF at INDEX in the V_MUST_DEFS array. The kill is the
+ use half of the must-def operand pair. */
+static inline use_operand_p
+get_v_must_def_kill_ptr (v_must_def_optype v_must_defs, unsigned int index)
+{
+ use_operand_p op;
+ gcc_assert (index < v_must_defs->num_v_must_defs);
+ op.use = &(v_must_defs->v_must_defs[index].use);
+ return op;
+}
return op;
}
-/* Mark the beginning of changes to the SSA operands for STMT. */
-static inline void
-start_ssa_stmt_operands (tree stmt ATTRIBUTE_UNUSED)
-{
-#ifdef ENABLE_CHECKING
- verify_start_operands (stmt);
-#endif
-}
-
/* Return the bitmap of addresses taken by STMT, or NULL if it takes
no addresses. */
static inline bitmap
static dataflow_t
get_immediate_uses (tree stmt)
{
- stmt_ann_t ann = stmt_ann (stmt);
+ stmt_ann_t ann;
+
+ /* PHI nodes keep their dataflow information directly rather than in
+ a statement annotation. */
+ if (TREE_CODE (stmt) == PHI_NODE)
+ return PHI_DF (stmt);
+
+ ann = stmt_ann (stmt);
return ann ? ann->df : NULL;
}
return NULL_TREE;
#ifdef ENABLE_CHECKING
- if (num >= num_immediate_uses (df))
- abort ();
+ gcc_assert (num < num_immediate_uses (df));
#endif
if (num < 2)
return df->uses[num];
static inline tree
phi_nodes (basic_block bb)
{
- if (bb->index < 0)
- return NULL;
return bb_ann (bb)->phi_nodes;
}
set_bb_for_stmt (phi, bb);
}
-/* Return the phi index number for an edge. */
-static inline int
-phi_arg_from_edge (tree phi, edge e)
-{
- int i;
-#if defined ENABLE_CHECKING
- if (!phi || TREE_CODE (phi) != PHI_NODE)
- abort();
-#endif
-
- for (i = 0; i < PHI_NUM_ARGS (phi); i++)
- if (PHI_ARG_EDGE (phi, i) == e)
- return i;
-
- return -1;
-}
-
/* Mark VAR as used, so that it'll be preserved during rtl expansion. */
static inline void
if (TREE_CODE (t) == SSA_NAME)
return true;
#ifdef ENABLE_CHECKING
- if (!is_gimple_min_invariant (t))
- abort ();
+ gcc_assert (is_gimple_min_invariant (t));
#endif
return false;
}
bsi.tsi = tsi_start (bb->stmt_list);
else
{
-#ifdef ENABLE_CHECKING
- if (bb->index >= 0)
- abort ();
-#endif
+ gcc_assert (bb->index < 0);
bsi.tsi.ptr = NULL;
bsi.tsi.container = NULL;
}
if (!bb->stmt_list)
{
-#ifdef ENABLE_CHECKING
- if (bb->index >= 0)
- abort ();
-#endif
+ gcc_assert (bb->index < 0);
bsi.tsi.ptr = NULL;
bsi.tsi.container = NULL;
return bsi;
be placed at the start of the basic block. This would not work if the
first statement was not label; rather fail here than enable the user
proceed in wrong way. */
- if (TREE_CODE (tsi_stmt (bsi.tsi)) != LABEL_EXPR)
- abort ();
+ gcc_assert (TREE_CODE (tsi_stmt (bsi.tsi)) == LABEL_EXPR);
next = bsi.tsi;
tsi_next (&next);
bsi.tsi = tsi_last (bb->stmt_list);
else
{
-#ifdef ENABLE_CHECKING
- if (bb->index >= 0)
- abort ();
-#endif
+ gcc_assert (bb->index < 0);
bsi.tsi.ptr = NULL;
bsi.tsi.container = NULL;
}
return bb->loop_father;
}
-/* Return true if VAR may be aliased. */
-static inline bool
-may_be_aliased (tree var)
-{
- return (TREE_ADDRESSABLE (var)
- || decl_function_context (var) != current_function_decl);
-}
-
/* Return true if VAR is a clobbered by function calls. */
static inline bool
is_call_clobbered (tree var)
{
+ /* Globals are always considered call clobbered; everything else
+ must appear in the call_clobbered_vars bitmap. */
- return needs_to_live_in_memory (var)
+ return is_global_var (var)
|| bitmap_bit_p (call_clobbered_vars, var_ann (var)->uid);
}
mark_call_clobbered (tree var)
{
var_ann_t ann = var_ann (var);
- /* Call-clobbered variables need to live in memory. */
- DECL_NEEDS_TO_LIVE_IN_MEMORY_INTERNAL (var) = 1;
+ /* If VAR is a memory tag, then we need to consider it a global
+ variable. This is because the pointer that VAR represents has
+ been found to point to either an arbitrary location or to a known
+ location in global memory. */
+ if (ann->mem_tag_kind != NOT_A_TAG && ann->mem_tag_kind != STRUCT_FIELD)
+ DECL_EXTERNAL (var) = 1;
bitmap_set_bit (call_clobbered_vars, ann->uid);
+ ssa_call_clobbered_cache_valid = false;
+ ssa_ro_call_cache_valid = false;
+}
+
+/* Clear the call-clobbered attribute from variable VAR. */
+static inline void
+clear_call_clobbered (tree var)
+{
+ var_ann_t ann = var_ann (var);
+ /* Undo the DECL_EXTERNAL marking that mark_call_clobbered applies
+ to memory tags. */
+ if (ann->mem_tag_kind != NOT_A_TAG && ann->mem_tag_kind != STRUCT_FIELD)
+ DECL_EXTERNAL (var) = 0;
+ bitmap_clear_bit (call_clobbered_vars, ann->uid);
+ /* The cached call-clobber operand lists are now stale. */
+ ssa_call_clobbered_cache_valid = false;
+ ssa_ro_call_cache_valid = false;
+}
/* Mark variable VAR as being non-addressable. */
mark_non_addressable (tree var)
{
bitmap_clear_bit (call_clobbered_vars, var_ann (var)->uid);
- DECL_NEEDS_TO_LIVE_IN_MEMORY_INTERNAL (var) = 0;
TREE_ADDRESSABLE (var) = 0;
+ ssa_call_clobbered_cache_valid = false;
+ ssa_ro_call_cache_valid = false;
}
/* Return the common annotation for T. Return NULL if the annotation
return (ann) ? ann : create_tree_ann (t);
}
+/* ----------------------------------------------------------------------- */
+
+/* The following set of routines are used to iterate over the various
+ types of SSA operands. */
+
+/* Return true if PTR is finished iterating, i.e. the last requested
+ operand has already been consumed. */
+static inline bool
+op_iter_done (ssa_op_iter *ptr)
+{
+ return ptr->done;
+}
+
+/* Get the next iterator use value for PTR. Uses are visited in the
+ order: real uses, VUSEs, V_MAY_DEF uses, V_MUST_DEF kills. Returns
+ NULL_USE_OPERAND_P and marks the iterator done when exhausted. */
+static inline use_operand_p
+op_iter_next_use (ssa_op_iter *ptr)
+{
+ if (ptr->use_i < ptr->num_use)
+ {
+ return USE_OP_PTR (ptr->ops->use_ops, (ptr->use_i)++);
+ }
+ if (ptr->vuse_i < ptr->num_vuse)
+ {
+ return VUSE_OP_PTR (ptr->ops->vuse_ops, (ptr->vuse_i)++);
+ }
+ if (ptr->v_mayu_i < ptr->num_v_mayu)
+ {
+ return V_MAY_DEF_OP_PTR (ptr->ops->v_may_def_ops,
+ (ptr->v_mayu_i)++);
+ }
+ if (ptr->v_mustu_i < ptr->num_v_mustu)
+ {
+ return V_MUST_DEF_KILL_PTR (ptr->ops->v_must_def_ops,
+ (ptr->v_mustu_i)++);
+ }
+ ptr->done = true;
+ return NULL_USE_OPERAND_P;
+}
+
+/* Get the next iterator def value for PTR. Defs are visited in the
+ order: real defs, V_MUST_DEF results, V_MAY_DEF results. Returns
+ NULL_DEF_OPERAND_P and marks the iterator done when exhausted. */
+static inline def_operand_p
+op_iter_next_def (ssa_op_iter *ptr)
+{
+ if (ptr->def_i < ptr->num_def)
+ {
+ return DEF_OP_PTR (ptr->ops->def_ops, (ptr->def_i)++);
+ }
+ if (ptr->v_mustd_i < ptr->num_v_mustd)
+ {
+ return V_MUST_DEF_RESULT_PTR (ptr->ops->v_must_def_ops,
+ (ptr->v_mustd_i)++);
+ }
+ if (ptr->v_mayd_i < ptr->num_v_mayd)
+ {
+ return V_MAY_DEF_RESULT_PTR (ptr->ops->v_may_def_ops,
+ (ptr->v_mayd_i)++);
+ }
+ ptr->done = true;
+ return NULL_DEF_OPERAND_P;
+}
+
+/* Get the next iterator tree value for PTR. All use operands are
+ visited before all def operands, in the same order as
+ op_iter_next_use and op_iter_next_def. Returns NULL and marks the
+ iterator done when exhausted. */
+static inline tree
+op_iter_next_tree (ssa_op_iter *ptr)
+{
+ if (ptr->use_i < ptr->num_use)
+ {
+ return USE_OP (ptr->ops->use_ops, (ptr->use_i)++);
+ }
+ if (ptr->vuse_i < ptr->num_vuse)
+ {
+ return VUSE_OP (ptr->ops->vuse_ops, (ptr->vuse_i)++);
+ }
+ if (ptr->v_mayu_i < ptr->num_v_mayu)
+ {
+ return V_MAY_DEF_OP (ptr->ops->v_may_def_ops, (ptr->v_mayu_i)++);
+ }
+ if (ptr->v_mustu_i < ptr->num_v_mustu)
+ {
+ return V_MUST_DEF_KILL (ptr->ops->v_must_def_ops, (ptr->v_mustu_i)++);
+ }
+ if (ptr->def_i < ptr->num_def)
+ {
+ return DEF_OP (ptr->ops->def_ops, (ptr->def_i)++);
+ }
+ if (ptr->v_mustd_i < ptr->num_v_mustd)
+ {
+ return V_MUST_DEF_RESULT (ptr->ops->v_must_def_ops,
+ (ptr->v_mustd_i)++);
+ }
+ if (ptr->v_mayd_i < ptr->num_v_mayd)
+ {
+ return V_MAY_DEF_RESULT (ptr->ops->v_may_def_ops,
+ (ptr->v_mayd_i)++);
+ }
+ ptr->done = true;
+ return NULL;
+}
+
+/* Initialize the iterator PTR for STMT, selecting which operand kinds
+ to visit with the SSA_OP_* bits in FLAGS. Operand kinds that are
+ not requested get a zero count, so they are skipped entirely. */
+static inline void
+op_iter_init (ssa_op_iter *ptr, tree stmt, int flags)
+{
+ stmt_operands_p ops;
+ stmt_ann_t ann = get_stmt_ann (stmt);
+
+ ops = &(ann->operands);
+ ptr->done = false;
+ ptr->ops = ops;
+ ptr->num_def = (flags & SSA_OP_DEF) ? NUM_DEFS (ops->def_ops) : 0;
+ ptr->num_use = (flags & SSA_OP_USE) ? NUM_USES (ops->use_ops) : 0;
+ ptr->num_vuse = (flags & SSA_OP_VUSE) ? NUM_VUSES (ops->vuse_ops) : 0;
+ ptr->num_v_mayu = (flags & SSA_OP_VMAYUSE)
+ ? NUM_V_MAY_DEFS (ops->v_may_def_ops) : 0;
+ ptr->num_v_mayd = (flags & SSA_OP_VMAYDEF)
+ ? NUM_V_MAY_DEFS (ops->v_may_def_ops) : 0;
+ ptr->num_v_mustu = (flags & SSA_OP_VMUSTDEFKILL)
+ ? NUM_V_MUST_DEFS (ops->v_must_def_ops) : 0;
+ ptr->num_v_mustd = (flags & SSA_OP_VMUSTDEF)
+ ? NUM_V_MUST_DEFS (ops->v_must_def_ops) : 0;
+ ptr->def_i = 0;
+ ptr->use_i = 0;
+ ptr->vuse_i = 0;
+ ptr->v_mayu_i = 0;
+ ptr->v_mayd_i = 0;
+ ptr->v_mustu_i = 0;
+ ptr->v_mustd_i = 0;
+}
+
+/* Initialize iterator PTR to the use operands in STMT based on FLAGS. Return
+ the first use, or NULL_USE_OPERAND_P if there is none. */
+static inline use_operand_p
+op_iter_init_use (ssa_op_iter *ptr, tree stmt, int flags)
+{
+ op_iter_init (ptr, stmt, flags);
+ return op_iter_next_use (ptr);
+}
+
+/* Initialize iterator PTR to the def operands in STMT based on FLAGS. Return
+ the first def, or NULL_DEF_OPERAND_P if there is none. */
+static inline def_operand_p
+op_iter_init_def (ssa_op_iter *ptr, tree stmt, int flags)
+{
+ op_iter_init (ptr, stmt, flags);
+ return op_iter_next_def (ptr);
+}
+
+/* Initialize iterator PTR to the operands in STMT based on FLAGS. Return
+ the first operand as a tree, or NULL if there is none. */
+static inline tree
+op_iter_init_tree (ssa_op_iter *ptr, tree stmt, int flags)
+{
+ op_iter_init (ptr, stmt, flags);
+ return op_iter_next_tree (ptr);
+}
+
+/* Get the next iterator mustdef value for PTR, returning the mustdef values in
+ KILL and DEF. Both come from the same V_MUST_DEF; when none remain,
+ both are set to the null operand and the iterator is marked done. */
+static inline void
+op_iter_next_mustdef (use_operand_p *kill, def_operand_p *def, ssa_op_iter *ptr)
+{
+ if (ptr->v_mustu_i < ptr->num_v_mustu)
+ {
+ *def = V_MUST_DEF_RESULT_PTR (ptr->ops->v_must_def_ops, ptr->v_mustu_i);
+ *kill = V_MUST_DEF_KILL_PTR (ptr->ops->v_must_def_ops, (ptr->v_mustu_i)++);
+ return;
+ }
+ else
+ {
+ *def = NULL_DEF_OPERAND_P;
+ *kill = NULL_USE_OPERAND_P;
+ }
+ ptr->done = true;
+ return;
+}
+/* Get the next iterator maydef value for PTR, returning the maydef values in
+ USE and DEF. Both come from the same V_MAY_DEF; when none remain,
+ both are set to the null operand and the iterator is marked done. */
+static inline void
+op_iter_next_maydef (use_operand_p *use, def_operand_p *def, ssa_op_iter *ptr)
+{
+ if (ptr->v_mayu_i < ptr->num_v_mayu)
+ {
+ *def = V_MAY_DEF_RESULT_PTR (ptr->ops->v_may_def_ops, ptr->v_mayu_i);
+ *use = V_MAY_DEF_OP_PTR (ptr->ops->v_may_def_ops, (ptr->v_mayu_i)++);
+ return;
+ }
+ else
+ {
+ *def = NULL_DEF_OPERAND_P;
+ *use = NULL_USE_OPERAND_P;
+ }
+ ptr->done = true;
+ return;
+}
+
+/* Initialize iterator PTR to the V_MAY_DEF operands in STMT. Return the
+ first may-def's operands in USE and DEF. */
+static inline void
+op_iter_init_maydef (ssa_op_iter *ptr, tree stmt, use_operand_p *use,
+ def_operand_p *def)
+{
+ op_iter_init (ptr, stmt, SSA_OP_VMAYUSE);
+ op_iter_next_maydef (use, def, ptr);
+}
+
+/* Initialize iterator PTR to the V_MUST_DEF operands in STMT. Return the
+ first must-def's operands in KILL and DEF. */
+static inline void
+op_iter_init_mustdef (ssa_op_iter *ptr, tree stmt, use_operand_p *kill,
+ def_operand_p *def)
+{
+ op_iter_init (ptr, stmt, SSA_OP_VMUSTDEFKILL);
+ op_iter_next_mustdef (kill, def, ptr);
+}
+
+/* Return true if REF, a handled component reference (e.g. a
+ COMPONENT_REF), has an ARRAY_REF somewhere in it. */
+
+static inline bool
+ref_contains_array_ref (tree ref)
+{
+ /* Walk down the chain of component references toward the base. */
+ while (handled_component_p (ref))
+ {
+ if (TREE_CODE (ref) == ARRAY_REF)
+ return true;
+ ref = TREE_OPERAND (ref, 0);
+ }
+ return false;
+}
+
+/* Given a variable VAR, lookup and return a pointer to the list of
+ subvariables for it. VAR must already have a variable annotation. */
+
+static inline subvar_t *
+lookup_subvars_for_var (tree var)
+{
+ var_ann_t ann = var_ann (var);
+ gcc_assert (ann);
+ return &ann->subvars;
+}
+
+/* Given a variable VAR, return a linked list of subvariables for VAR, or
+ NULL, if there are no subvariables. SSA names are resolved to their
+ underlying variable first. */
+
+static inline subvar_t
+get_subvars_for_var (tree var)
+{
+ subvar_t subvars;
+
+ gcc_assert (SSA_VAR_P (var));
+
+ if (TREE_CODE (var) == SSA_NAME)
+ subvars = *(lookup_subvars_for_var (SSA_NAME_VAR (var)));
+ else
+ subvars = *(lookup_subvars_for_var (var));
+ return subvars;
+}
+
+/* Return true if V is a tree that we can have subvars for.
+ Normally, this is any aggregate type. However, due to implementation
+ limitations at the moment, we exclude array types. */
+
+static inline bool
+var_can_have_subvars (tree v)
+{
+ return (AGGREGATE_TYPE_P (TREE_TYPE (v)) &&
+ TREE_CODE (TREE_TYPE (v)) != ARRAY_TYPE);
+}
+
+
+/* Return true if OFFSET and SIZE define a range that overlaps with some
+ portion of the range of SV, a subvar. If there was an exact overlap,
+ *EXACT will be set to true upon return. EXACT may be NULL, in which
+ case only the boolean overlap result is reported. Ranges are
+ half-open: [offset, offset + size). */
+
+static inline bool
+overlap_subvar (HOST_WIDE_INT offset, HOST_WIDE_INT size,
+ subvar_t sv, bool *exact)
+{
+ /* There are three possible cases of overlap.
+ 1. We can have an exact overlap, like so:
+ |offset, offset + size |
+ |sv->offset, sv->offset + sv->size |
+
+ 2. We can have offset starting after sv->offset, like so:
+
+ |offset, offset + size |
+ |sv->offset, sv->offset + sv->size |
+
+ 3. We can have offset starting before sv->offset, like so:
+
+ |offset, offset + size |
+ |sv->offset, sv->offset + sv->size|
+ */
+
+ if (exact)
+ *exact = false;
+ if (offset == sv->offset && size == sv->size)
+ {
+ if (exact)
+ *exact = true;
+ return true;
+ }
+ else if (offset >= sv->offset && offset < (sv->offset + sv->size))
+ {
+ return true;
+ }
+ else if (offset < sv->offset && (offset + size > sv->offset))
+ {
+ return true;
+ }
+ return false;
+
+}
+
#endif /* _TREE_FLOW_INLINE_H */