/* Bitmap obstack for our data structures that need to survive across
   compilations of multiple functions. */
static bitmap_obstack operands_bitmap_obstack;
+
/* Set for building all the loaded symbols. */
static bitmap build_loads;
get_expr_operands. FULL_REF is a tree that contains the entire
pointer dereference expression, if available, or NULL otherwise.
OFFSET and SIZE come from the memory access expression that
- generated this virtual operand. */
+ generated this virtual operand. IS_CALL_SITE is true if the
+ affected statement is a call site. */
static void
add_virtual_operand (tree var, stmt_ann_t s_ann, int flags,
tree full_ref, HOST_WIDE_INT offset,
- HOST_WIDE_INT size)
+ HOST_WIDE_INT size, bool is_call_site)
{
bitmap aliases = NULL;
tree sym;
if (MTAG_P (var))
aliases = MTAG_ALIASES (var);
+
if (aliases == NULL)
{
if (s_ann && !gimple_aliases_computed_p (cfun))
s_ann->has_volatile_ops = true;
+
/* The variable is not aliased or it is an alias tag. */
if (flags & opf_def)
append_vdef (var);
al = referenced_var (i);
if (!access_can_touch_variable (full_ref, al, offset, size))
continue;
-
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
none_added = false;
append_vdef (al);
}
al = referenced_var (i);
if (!access_can_touch_variable (full_ref, al, offset, size))
continue;
+
+ /* Call-clobbered tags may have non-call-clobbered
+ symbols in their alias sets. Ignore them if we are
+ adding VOPs for a call site. */
+ if (is_call_site && !is_call_clobbered (al))
+ continue;
+
none_added = false;
append_vuse (al);
}
append_use (var_p);
}
else
- add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1);
+ add_virtual_operand (var, s_ann, flags, NULL_TREE, 0, -1, false);
}
{
/* PTR has its own memory tag. Use it. */
add_virtual_operand (pi->name_mem_tag, s_ann, flags,
- full_ref, offset, size);
+ full_ref, offset, size, false);
}
else
{
if (v_ann->symbol_mem_tag)
add_virtual_operand (v_ann->symbol_mem_tag, s_ann, flags,
- full_ref, offset, size);
- /* Aliasing information is missing; mark statement as volatile so we
- won't optimize it out too actively. */
- else if (s_ann && !gimple_aliases_computed_p (cfun)
+ full_ref, offset, size, false);
+
+ /* Aliasing information is missing; mark statement as
+ volatile so we won't optimize it out too actively. */
+ else if (s_ann
+ && !gimple_aliases_computed_p (cfun)
&& (flags & opf_def))
s_ann->has_volatile_ops = true;
}
if (s_ann)
s_ann->makes_clobbering_call = true;
- /* If we created .GLOBAL_VAR earlier, just use it. See compute_may_aliases
- for the heuristic used to decide whether to create .GLOBAL_VAR or not. */
+ /* If we created .GLOBAL_VAR earlier, just use it. */
if (gimple_global_var (cfun))
{
tree var = gimple_global_var (cfun);
- add_stmt_operand (&var, s_ann, opf_def);
+ add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
return;
}
if (TREE_CODE (var) == STRUCT_FIELD_TAG)
real_var = SFT_PARENT_VAR (var);
- not_read = not_read_b ? bitmap_bit_p (not_read_b,
- DECL_UID (real_var)) : false;
- not_written = not_written_b ? bitmap_bit_p (not_written_b,
- DECL_UID (real_var)) : false;
+ not_read = not_read_b
+ ? bitmap_bit_p (not_read_b, DECL_UID (real_var))
+ : false;
+
+ not_written = not_written_b
+ ? bitmap_bit_p (not_written_b, DECL_UID (real_var))
+ : false;
gcc_assert (!unmodifiable_var_p (var));
clobber_stats.clobbered_vars++;
tree call = get_call_expr_in (stmt);
if (call_expr_flags (call) & (ECF_CONST | ECF_PURE))
{
- add_stmt_operand (&var, s_ann, opf_use);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
clobber_stats.unescapable_clobbers_avoided++;
continue;
}
{
clobber_stats.static_write_clobbers_avoided++;
if (!not_read)
- add_stmt_operand (&var, s_ann, opf_use);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
else
clobber_stats.static_read_clobbers_avoided++;
}
else
- add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1);
+ add_virtual_operand (var, s_ann, opf_def, NULL, 0, -1, true);
}
}
if (gimple_global_var (cfun))
{
tree var = gimple_global_var (cfun);
- add_stmt_operand (&var, s_ann, opf_use);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
return;
}
continue;
}
- add_stmt_operand (&var, s_ann, opf_use | opf_implicit);
+ add_virtual_operand (var, s_ann, opf_use, NULL, 0, -1, true);
}
}
{
tree type = TREE_TYPE (t);
- if (POINTER_TYPE_P (type) || AGGREGATE_TYPE_P (type)
+ if (POINTER_TYPE_P (type)
+ || AGGREGATE_TYPE_P (type)
|| TREE_CODE (type) == COMPLEX_TYPE)
return true;
+
return false;
}
tree pttype = TREE_TYPE (TREE_TYPE (t));
get_constraint_for (exp, results);
+
/* Make sure we capture constraints to all elements
of an array. */
if ((handled_component_p (exp)
}
}
+
/* Update related alias information kept in AI. This is used when
building name tags, alias sets and deciding grouping heuristics.
STMT is the statement to process. This function also updates
}
}
/* In IPA mode, we need to generate constraints to pass call
- arguments through their calls. There are two case, either a
- modify_expr when we are returning a value, or just a plain
- call_expr when we are not. */
+ arguments through their calls. There are two cases, either a
+ GIMPLE_MODIFY_STMT when we are returning a value, or just a plain
+ CALL_EXPR when we are not. */
else if (in_ipa_mode
&& ((TREE_CODE (t) == GIMPLE_MODIFY_STMT
&& TREE_CODE (GIMPLE_STMT_OPERAND (t, 1)) == CALL_EXPR
}
i++;
}
+
/* If we are returning a value, assign it to the result. */
if (lhsop)
{
tree t;
struct constraint_expr lhs, rhs;
- /* For each incoming pointer argument arg, ARG = ANYTHING or a
- dummy variable if flag_argument_noalias > 2. */
+ /* For each incoming pointer argument arg, create the constraint ARG
+ = ANYTHING or a dummy variable if flag_argument_noalias is set. */
for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
{
varinfo_t p;