/* Gimple IR support functions.
- Copyright 2007, 2008, 2009 Free Software Foundation, Inc.
+ Copyright 2007, 2008, 2009, 2010 Free Software Foundation, Inc.
Contributed by Aldy Hernandez <aldyh@redhat.com>
This file is part of GCC.
/* Build a tuple with operands. CODE is the statement to build (which
must be one of the GIMPLE_WITH_OPS tuples). SUBCODE is the sub-code
- for the new tuple. NUM_OPS is the number of operands to allocate. */
+ for the new tuple. NUM_OPS is the number of operands to allocate. */
#define gimple_build_with_ops(c, s, n) \
gimple_build_with_ops_stat (c, s, n MEM_STAT_INFO)
return s;
}
+/* Reset alias information on call S. */
+
+void
+gimple_call_reset_alias_info (gimple s)
+{
+ if (gimple_call_flags (s) & ECF_CONST)
+ memset (gimple_call_use_set (s), 0, sizeof (struct pt_solution));
+ else
+ pt_solution_reset (gimple_call_use_set (s));
+ if (gimple_call_flags (s) & (ECF_CONST|ECF_PURE|ECF_NOVOPS))
+ memset (gimple_call_clobber_set (s), 0, sizeof (struct pt_solution));
+ else
+ pt_solution_reset (gimple_call_clobber_set (s));
+}
+
/* Helper for gimple_build_call, gimple_build_call_vec and
gimple_build_call_from_tree. Build the basic components of a
GIMPLE_CALL statement to function FN with NARGS arguments. */
if (TREE_CODE (fn) == FUNCTION_DECL)
fn = build_fold_addr_expr (fn);
gimple_set_op (s, 1, fn);
+ gimple_call_reset_alias_info (s);
return s;
}
/* Need 1 operand for LHS and 1 or 2 for the RHS (depending on the
code). */
num_ops = get_gimple_rhs_num_ops (subcode) + 1;
-
+
p = gimple_build_with_ops_stat (GIMPLE_ASSIGN, (unsigned)subcode, num_ops
PASS_MEM_STAT);
gimple_assign_set_lhs (p, lhs);
gimple
gimplify_assign (tree dst, tree src, gimple_seq *seq_p)
-{
+{
tree t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
gimplify_and_add (t, seq_p);
ggc_free (t);
/* Build a GIMPLE_NOP statement. */
-gimple
+gimple
gimple_build_nop (void)
{
return gimple_alloc (GIMPLE_NOP, 0);
*/
static inline gimple
-gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
+gimple_build_asm_1 (const char *string, unsigned ninputs, unsigned noutputs,
unsigned nclobbers, unsigned nlabels)
{
gimple p;
#ifdef GATHER_STATISTICS
gimple_alloc_sizes[(int) gimple_alloc_kind (GIMPLE_ASM)] += size;
#endif
-
+
return p;
}
LABELS is a vector of destination labels. */
gimple
-gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
+gimple_build_asm_vec (const char *string, VEC(tree,gc)* inputs,
VEC(tree,gc)* outputs, VEC(tree,gc)* clobbers,
VEC(tree,gc)* labels)
{
p = gimple_build_asm_1 (string,
VEC_length (tree, inputs),
- VEC_length (tree, outputs),
+ VEC_length (tree, outputs),
VEC_length (tree, clobbers),
VEC_length (tree, labels));
-
+
for (i = 0; i < VEC_length (tree, inputs); i++)
gimple_asm_set_input_op (p, i, VEC_index (tree, inputs, i));
for (i = 0; i < VEC_length (tree, clobbers); i++)
gimple_asm_set_clobber_op (p, i, VEC_index (tree, clobbers, i));
-
+
for (i = 0; i < VEC_length (tree, labels); i++)
gimple_asm_set_label_op (p, i, VEC_index (tree, labels, i));
-
+
return p;
}
NLABELS is the number of labels in the switch excluding the default.
DEFAULT_LABEL is the default label for the switch statement. */
-gimple
+gimple
gimple_build_switch_nlabels (unsigned nlabels, tree index, tree default_label)
{
/* nlabels + 1 default label + 1 index. */
/* Build a GIMPLE_SWITCH statement.
INDEX is the switch's index.
- NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
+ NLABELS is the number of labels in the switch excluding the DEFAULT_LABEL.
... are the labels excluding the default. */
-gimple
+gimple
gimple_build_switch (unsigned nlabels, tree index, tree default_label, ...)
{
va_list al;
BODY is the sequence of statements for which only one thread can execute.
NAME is optional identifier for this critical block. */
-gimple
+gimple
gimple_build_omp_critical (gimple_seq body, tree name)
{
gimple p = gimple_alloc (GIMPLE_OMP_CRITICAL, 0);
/* Build a GIMPLE_OMP_FOR statement.
BODY is sequence of statements inside the for loop.
- CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
+ CLAUSES, are any of the OMP loop construct's clauses: private, firstprivate,
lastprivate, reductions, ordered, schedule, and nowait.
COLLAPSE is the collapse count.
PRE_BODY is the sequence of statements that are loop invariant. */
CHILD_FN is the function created for the parallel threads to execute.
DATA_ARG are the shared data argument(s). */
-gimple
-gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
+gimple
+gimple_build_omp_parallel (gimple_seq body, tree clauses, tree child_fn,
tree data_arg)
{
gimple p = gimple_alloc (GIMPLE_OMP_PARALLEL, 0);
COPY_FN is the optional function for firstprivate initialization.
ARG_SIZE and ARG_ALIGN are size and alignment of the data block. */
-gimple
+gimple
gimple_build_omp_task (gimple_seq body, tree clauses, tree child_fn,
tree data_arg, tree copy_fn, tree arg_size,
tree arg_align)
BODY is the sequence of statements to be executed by just the master. */
-gimple
+gimple
gimple_build_omp_master (gimple_seq body)
{
gimple p = gimple_alloc (GIMPLE_OMP_MASTER, 0);
CONTROL_DEF is the definition of the control variable.
CONTROL_USE is the use of the control variable. */
-gimple
+gimple
gimple_build_omp_continue (tree control_def, tree control_use)
{
gimple p = gimple_alloc (GIMPLE_OMP_CONTINUE, 0);
BODY is the sequence of statements inside a loop that will executed in
sequence. */
-gimple
+gimple
gimple_build_omp_ordered (gimple_seq body)
{
gimple p = gimple_alloc (GIMPLE_OMP_ORDERED, 0);
/* Build a GIMPLE_OMP_RETURN statement.
WAIT_P is true if this is a non-waiting return. */
-gimple
+gimple
gimple_build_omp_return (bool wait_p)
{
gimple p = gimple_alloc (GIMPLE_OMP_RETURN, 0);
CLAUSES are any of the OMP sections contsruct's clauses: private,
firstprivate, lastprivate, reduction, and nowait. */
-gimple
+gimple
gimple_build_omp_sections (gimple_seq body, tree clauses)
{
gimple p = gimple_alloc (GIMPLE_OMP_SECTIONS, 0);
CLAUSES are any of the OMP single construct's clauses: private, firstprivate,
copyprivate, nowait. */
-gimple
+gimple
gimple_build_omp_single (gimple_seq body, tree clauses)
{
gimple p = gimple_alloc (GIMPLE_OMP_SINGLE, 0);
/* If this triggers, it's a sign that the same list is being freed
twice. */
gcc_assert (seq != gimple_seq_cache || gimple_seq_cache == NULL);
-
+
/* Add SEQ to the pool of free sequences. */
seq->next_free = gimple_seq_cache;
gimple_seq_cache = seq;
/* Walk all the statements in the sequence SEQ calling walk_gimple_stmt
on each one. WI is as in walk_gimple_stmt.
-
+
If walk_gimple_stmt returns non-NULL, the walk is stopped, the
value is stored in WI->CALLBACK_RESULT and the statement that
produced the value is returned.
The return value is that returned by the last call to walk_tree, or
NULL_TREE if no CALLBACK_OP is specified. */
-inline tree
+tree
walk_gimple_op (gimple stmt, walk_tree_fn callback_op,
struct walk_stmt_info *wi)
{
switch (gimple_code (stmt))
{
case GIMPLE_ASSIGN:
- /* Walk the RHS operands. A formal temporary LHS may use a
- COMPONENT_REF RHS. */
+ /* Walk the RHS operands. If the LHS is of a non-renamable type or
+ is a register variable, we may use a COMPONENT_REF on the RHS. */
if (wi)
- wi->val_only = !is_gimple_reg (gimple_assign_lhs (stmt))
- || !gimple_assign_single_p (stmt);
+ {
+ tree lhs = gimple_assign_lhs (stmt);
+ wi->val_only
+ = (is_gimple_reg_type (TREE_TYPE (lhs)) && !is_gimple_reg (lhs))
+ || !gimple_assign_single_p (stmt);
+ }
for (i = 1; i < gimple_num_ops (stmt); i++)
{
assignment. I suspect there may be cases where gimple_assign_copy_p,
gimple_assign_single_p, or equivalent logic is used where a similar
treatment of unary NOPs is appropriate. */
-
+
bool
gimple_assign_unary_nop_p (gimple gs)
{
}
-/* Fold the expression computed by STMT. If the expression can be
- folded, return the folded result, otherwise return NULL. STMT is
- not modified. */
-
-tree
-gimple_fold (const_gimple stmt)
-{
- location_t loc = gimple_location (stmt);
- switch (gimple_code (stmt))
- {
- case GIMPLE_COND:
- return fold_binary_loc (loc, gimple_cond_code (stmt),
- boolean_type_node,
- gimple_cond_lhs (stmt),
- gimple_cond_rhs (stmt));
-
- case GIMPLE_ASSIGN:
- switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
- {
- case GIMPLE_UNARY_RHS:
- return fold_unary_loc (loc, gimple_assign_rhs_code (stmt),
- TREE_TYPE (gimple_assign_lhs (stmt)),
- gimple_assign_rhs1 (stmt));
- case GIMPLE_BINARY_RHS:
- return fold_binary_loc (loc, gimple_assign_rhs_code (stmt),
- TREE_TYPE (gimple_assign_lhs (stmt)),
- gimple_assign_rhs1 (stmt),
- gimple_assign_rhs2 (stmt));
- case GIMPLE_SINGLE_RHS:
- return fold (gimple_assign_rhs1 (stmt));
- default:;
- }
- break;
-
- case GIMPLE_SWITCH:
- return gimple_switch_index (stmt);
-
- case GIMPLE_CALL:
- return NULL_TREE;
-
- default:
- break;
- }
-
- gcc_unreachable ();
-}
-
-
/* Modify the RHS of the assignment pointed-to by GSI using the
operands in the expression tree EXPR.
gcc_unreachable();
}
+/* Replace the LHS of STMT, an assignment, either a GIMPLE_ASSIGN or a
+ GIMPLE_CALL, with NLHS, in preparation for modifying the RHS to an
+ expression with a different value.
+
+ This will update any annotations (say debug bind stmts) referring
+ to the original LHS, so that they use the RHS instead. This is
+ done even if NLHS and LHS are the same, for it is understood that
+ the RHS will be modified afterwards, and NLHS will not be assigned
+ an equivalent value.
+
+ Adjusting any non-annotation uses of the LHS, if needed, is a
+ responsibility of the caller.
+
+ The effect of this call should be pretty much the same as that of
+ inserting a copy of STMT before STMT, and then removing the
+ original stmt, at which time gsi_remove() would have updated
+ annotations, but using this function saves all the inserting,
+ copying and removing. */
+
+void
+gimple_replace_lhs (gimple stmt, tree nlhs)
+{
+ if (MAY_HAVE_DEBUG_STMTS)
+ {
+ tree lhs = gimple_get_lhs (stmt);
+
+ gcc_assert (SSA_NAME_DEF_STMT (lhs) == stmt);
+
+ insert_debug_temp_for_var_def (NULL, lhs);
+ }
+
+ gimple_set_lhs (stmt, nlhs);
+}
/* Return a deep copy of statement STMT. All the operands from STMT
are reallocated and copied using unshare_expr. The DEF, USE, VDEF
{
while (handled_component_p (t))
t = TREE_OPERAND (t, 0);
-
+
if (SSA_VAR_P (t)
|| TREE_CODE (t) == STRING_CST
|| TREE_CODE (t) == CONSTRUCTOR
tree
canonicalize_cond_expr_cond (tree t)
{
+ /* Strip conversions around boolean operations. */
+ if (CONVERT_EXPR_P (t)
+ && truth_value_p (TREE_CODE (TREE_OPERAND (t, 0))))
+ t = TREE_OPERAND (t, 0);
+
/* For (bool)x use x != 0. */
- if (TREE_CODE (t) == NOP_EXPR
- && TREE_TYPE (t) == boolean_type_node)
+ if (CONVERT_EXPR_P (t)
+ && TREE_CODE (TREE_TYPE (t)) == BOOLEAN_TYPE)
{
tree top0 = TREE_OPERAND (t, 0);
t = build2 (NE_EXPR, TREE_TYPE (t),
/* Return true if the field decls F1 and F2 are at the same offset. */
-static bool
+bool
compare_field_offset (tree f1, tree f2)
{
if (DECL_OFFSET_ALIGN (f1) == DECL_OFFSET_ALIGN (f2))
&& RECORD_OR_UNION_TYPE_P (TREE_TYPE (t1))
&& (!COMPLETE_TYPE_P (TREE_TYPE (t1))
|| !COMPLETE_TYPE_P (TREE_TYPE (t2)))
- && compare_type_names_p (TREE_TYPE (t1), TREE_TYPE (t2), true))
+ && compare_type_names_p (TYPE_MAIN_VARIANT (TREE_TYPE (t1)),
+ TYPE_MAIN_VARIANT (TREE_TYPE (t2)), true))
{
/* Replace the pointed-to incomplete type with the
complete one. */
/* For integer types hash the types min/max values and the string flag. */
if (TREE_CODE (type) == INTEGER_TYPE)
{
- v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
- v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
+ /* OMP lowering can introduce error_mark_node in place of
+ random local decls in types. */
+ if (TYPE_MIN_VALUE (type) != error_mark_node)
+ v = iterative_hash_expr (TYPE_MIN_VALUE (type), v);
+ if (TYPE_MAX_VALUE (type) != error_mark_node)
+ v = iterative_hash_expr (TYPE_MAX_VALUE (type), v);
v = iterative_hash_hashval_t (TYPE_STRING_FLAG (type), v);
}
alias_set_type
gimple_get_alias_set (tree t)
{
- static bool recursing_p;
tree u;
/* Permit type-punning when accessing a union, provided the access
}
else if (POINTER_TYPE_P (t))
{
- tree t1;
-
- /* ??? We can end up creating cycles with TYPE_MAIN_VARIANT
- and TYPE_CANONICAL. Avoid recursing endlessly between
- this langhook and get_alias_set. */
- if (recursing_p)
- return -1;
+ /* From the common C and C++ langhook implementation:
- /* Unfortunately, there is no canonical form of a pointer type.
+ Unfortunately, there is no canonical form of a pointer type.
In particular, if we have `typedef int I', then `int *', and
`I *' are different types. So, we have to pick a canonical
representative. We do this below.
can dereference IPP and CIPP. So, we ignore cv-qualifiers on
the pointed-to types. This issue has been reported to the
C++ committee. */
- t1 = build_type_no_quals (t);
- if (t1 != t)
- {
- alias_set_type set;
- recursing_p = true;
- set = get_alias_set (t1);
- recursing_p = false;
- return set;
- }
+
+ /* In addition to the above canonicalization issue with LTO
+ we should also canonicalize `T (*)[]' to `T *' avoiding
+ alias issues with pointer-to element types and pointer-to
+ array types.
+
+ Likewise we need to deal with the situation of incomplete
+ pointed-to types and make `*(struct X **)&a' and
+ `*(struct X {} **)&a' alias. Otherwise we will have to
+ guarantee that all pointer-to incomplete type variants
+ will be replaced by pointer-to complete type variants if
+ they are available.
+
+ With LTO the convenient situation of using `void *' to
+ access and store any pointer type will also become
+ more apparent (and `void *' is just another pointer-to
+ incomplete type). Assigning alias-set zero to `void *'
+ and all pointer-to incomplete types is not an appealing
+ solution. Assigning an effective alias-set zero only
+ affecting pointers might be - by recording proper subset
+ relationships of all pointer alias-sets.
+
+ Pointer-to function types are another grey area which
+ needs caution. Globbing them all into one alias-set
+ or the above effective zero set would work. */
+
+ /* For now just assign the same alias-set to all pointers.
+ That's simple and avoids all the above problems. */
+ if (t != ptr_type_node)
+ return get_alias_set (ptr_type_node);
}
return -1;
tree addr, void *data)
{
bitmap addresses_taken = (bitmap)data;
- while (handled_component_p (addr))
- addr = TREE_OPERAND (addr, 0);
- if (DECL_P (addr))
+ addr = get_base_address (addr);
+ if (addr
+ && DECL_P (addr))
{
bitmap_set_bit (addresses_taken, DECL_UID (addr));
return true;
const char *
gimple_decl_printable_name (tree decl, int verbosity)
{
- gcc_assert (decl && DECL_NAME (decl));
+ if (!DECL_NAME (decl))
+ return NULL;
if (DECL_ASSEMBLER_NAME_SET_P (decl))
{