#include "tree-flow.h"
#include "tree-inline.h"
#include "diagnostic-core.h"
-#include "toplev.h"
#include "gimple.h"
#include "hashtab.h"
#include "function.h"
static varinfo_t first_or_preceding_vi_for_offset (varinfo_t,
unsigned HOST_WIDE_INT);
static varinfo_t lookup_vi_for_tree (tree);
+static inline bool type_can_have_subvars (const_tree);
/* Pool of variable info structures. */
static alloc_pool variable_info_pool;
ret->is_global_var = (t == NULL_TREE);
ret->is_fn_info = false;
if (t && DECL_P (t))
- ret->is_global_var = is_global_var (t);
+ ret->is_global_var = (is_global_var (t)
+ /* We have to treat even local register variables
+ as escape points. */
+ || (TREE_CODE (t) == VAR_DECL
+ && DECL_HARD_REGISTER (t)));
ret->solution = BITMAP_ALLOC (&pta_obstack);
ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
ret->next = NULL;
static HOST_WIDE_INT
bitpos_of_field (const tree fdecl)
{
-
if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
|| !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
return -1;
- return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
+ return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
+ TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
}
/* If we are not taking the address then make sure to process
all subvariables we might access. */
+ if (address_p)
+ return;
+
cs = *VEC_last (ce_s, *results);
- if (address_p
- || cs.type != SCALAR)
+ if (cs.type == DEREF
+ && type_can_have_subvars (TREE_TYPE (t)))
+ {
+ /* For dereferences this means we have to defer it
+ to solving time. */
+ VEC_last (ce_s, *results)->offset = UNKNOWN_OFFSET;
+ return;
+ }
+ if (cs.type != SCALAR)
return;
vi = get_varinfo (cs.var);
do_structure_copy (lhsop, rhsop);
else
{
+ enum tree_code code = gimple_assign_rhs_code (t);
+
get_constraint_for (lhsop, &lhsc);
- if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
+ if (code == POINTER_PLUS_EXPR)
get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
gimple_assign_rhs2 (t), &rhsc);
- else if (gimple_assign_rhs_code (t) == BIT_AND_EXPR
+ else if (code == BIT_AND_EXPR
&& TREE_CODE (gimple_assign_rhs2 (t)) == INTEGER_CST)
{
/* Aligning a pointer via a BIT_AND_EXPR is offsetting
get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
NULL_TREE, &rhsc);
}
- else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
+ else if ((CONVERT_EXPR_CODE_P (code)
&& !(POINTER_TYPE_P (gimple_expr_type (t))
&& !POINTER_TYPE_P (TREE_TYPE (rhsop))))
|| gimple_assign_single_p (t))
get_constraint_for_rhs (rhsop, &rhsc);
+ else if (truth_value_p (code))
+ /* Truth value results are not pointers (or pointer parts); at
+ best this would be a very unreasonable obfuscation of one. */
+ ;
else
{
/* All other operations are merges. */
&& (!in_ipa_mode
|| DECL_EXTERNAL (lhsop) || TREE_PUBLIC (lhsop)))
make_escape_constraint (rhsop);
- /* If this is a conversion of a non-restrict pointer to a
- restrict pointer track it with a new heapvar. */
- else if (gimple_assign_cast_p (t)
- && POINTER_TYPE_P (TREE_TYPE (rhsop))
- && POINTER_TYPE_P (TREE_TYPE (lhsop))
- && !TYPE_RESTRICT (TREE_TYPE (rhsop))
- && TYPE_RESTRICT (TREE_TYPE (lhsop)))
- make_constraint_from_restrict (get_vi_for_tree (lhsop),
- "CAST_RESTRICT");
}
/* Handle escapes through return. */
else if (gimple_code (t) == GIMPLE_RETURN
VEC_qsort (fieldoff_s, fieldstack, fieldoff_compare);
}
+/* Return true if T is a type that can have subvars. */
+
+static inline bool
+type_can_have_subvars (const_tree t)
+{
+ /* Aggregates without overlapping fields can have subvars. */
+ return TREE_CODE (t) == RECORD_TYPE;
+}
+
/* Return true if V is a tree that we can have subvars for.
Normally, this is any aggregate type. Also complex
types which are not gimple registers can have subvars. */
if (!DECL_P (v))
return false;
- /* Aggregates without overlapping fields can have subvars. */
- if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
- return true;
-
- return false;
+ return type_can_have_subvars (TREE_TYPE (v));
}
/* Return true if T is a type that does contain pointers. */
if (!VEC_empty (fieldoff_s, *fieldstack))
pair = VEC_last (fieldoff_s, *fieldstack);
+ /* If there isn't anything at offset zero, create something. */
+ if (!pair
+ && offset + foff != 0)
+ {
+ pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
+ pair->offset = 0;
+ pair->size = offset + foff;
+ pair->has_unknown_size = false;
+ pair->must_have_pointers = false;
+ pair->may_have_pointers = false;
+ pair->only_restrict_pointers = false;
+ }
+
if (!DECL_SIZE (field)
|| !host_integerp (DECL_SIZE (field), 1))
has_unknown_size = true;
{
memset (pt, 0, sizeof (struct pt_solution));
pt->vars = BITMAP_GGC_ALLOC ();
- bitmap_set_bit (pt->vars, DECL_UID (var));
+ bitmap_set_bit (pt->vars, DECL_PT_UID (var));
pt->vars_contains_global = is_global_var (var);
}
push_cfun (func);
current_function_decl = node->decl;
- /* For externally visible functions use local constraints for
- their arguments. For local functions we see all callers
- and thus do not need initial constraints for parameters. */
if (node->local.externally_visible)
- intra_create_variable_infos ();
+ {
+ /* For externally visible functions use local constraints for
+ their arguments. For local functions we see all callers
+ and thus do not need initial constraints for parameters. */
+ intra_create_variable_infos ();
+
+ /* We also need to make function return values escape. Nothing
+ escapes by returning from main though. */
+ if (!MAIN_NAME_P (DECL_NAME (node->decl)))
+ {
+ varinfo_t fi, rvi;
+ fi = lookup_vi_for_tree (node->decl);
+ rvi = first_vi_for_offset (fi, fi_result);
+ if (rvi && rvi->offset == fi_result)
+ {
+ struct constraint_expr includes;
+ struct constraint_expr var;
+ includes.var = escaped_id;
+ includes.offset = 0;
+ includes.type = SCALAR;
+ var.var = rvi->id;
+ var.offset = 0;
+ var.type = SCALAR;
+ process_constraint (new_constraint (includes, var));
+ }
+ }
+ }
/* Build constraints for the function body. */
FOR_EACH_BB_FN (bb, func)