#include "flags.h"
#include "rtl.h"
#include "toplev.h"
+#include "ggc.h"
+#include "insn-config.h"
+#include "integrate.h"
-static tree bot_manip PROTO((tree));
-static tree perm_manip PROTO((tree));
+static tree bot_manip PROTO((tree *, int *, void *));
+static tree bot_replace PROTO((tree *, int *, void *));
static tree build_cplus_array_type_1 PROTO((tree, tree));
static void list_hash_add PROTO((int, tree));
static int list_hash PROTO((tree, tree, tree));
static tree list_hash_lookup PROTO((int, tree, tree, tree));
-static void propagate_binfo_offsets PROTO((tree, tree));
-static int avoid_overlap PROTO((tree, tree));
-static int lvalue_p_1 PROTO((tree, int));
-static int equal_functions PROTO((tree, tree));
-static tree no_linkage_helper PROTO((tree));
+static cp_lvalue_kind lvalue_p_1 PROTO((tree, int));
+static tree no_linkage_helper PROTO((tree *, int *, void *));
static tree build_srcloc PROTO((char *, int));
-
-#define CEIL(x,y) (((x) + (y) - 1) / (y))
-
-/* Returns non-zero if REF is an lvalue. If
- TREAT_CLASS_RVALUES_AS_LVALUES is non-zero, rvalues of class type
- are considered lvalues. */
-
-static int
+static void mark_list_hash PROTO ((void *));
+static int statement_code_p PROTO((enum tree_code));
+static tree mark_local_for_remap_r PROTO((tree *, int *, void *));
+static tree cp_unsave_r PROTO ((tree *, int *, void *));
+static void cp_unsave PROTO((tree *));
+static tree build_target_expr PROTO((tree, tree));
+
+/* If REF is an lvalue, returns the kind of lvalue that REF is.
+ Otherwise, returns clk_none. If TREAT_CLASS_RVALUES_AS_LVALUES is
+ non-zero, rvalues of class type are considered lvalues. */
+
+static cp_lvalue_kind
lvalue_p_1 (ref, treat_class_rvalues_as_lvalues)
tree ref;
int treat_class_rvalues_as_lvalues;
{
+ cp_lvalue_kind op1_lvalue_kind = clk_none;
+ cp_lvalue_kind op2_lvalue_kind = clk_none;
+
if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
- return 1;
+ return clk_ordinary;
if (ref == current_class_ptr && flag_this_is_variable <= 0)
- return 0;
+ return clk_none;
switch (TREE_CODE (ref))
{
what they refer to are valid lvals. */
case PREINCREMENT_EXPR:
case PREDECREMENT_EXPR:
- case COMPONENT_REF:
case SAVE_EXPR:
case UNSAVE_EXPR:
case TRY_CATCH_EXPR:
case WITH_CLEANUP_EXPR:
case REALPART_EXPR:
case IMAGPART_EXPR:
+ case NOP_EXPR:
return lvalue_p_1 (TREE_OPERAND (ref, 0),
treat_class_rvalues_as_lvalues);
+ case COMPONENT_REF:
+ op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
+ treat_class_rvalues_as_lvalues);
+ if (op1_lvalue_kind
+ /* The "field" can be a FUNCTION_DECL or an OVERLOAD in some
+ situations. */
+ && TREE_CODE (TREE_OPERAND (ref, 1)) == FIELD_DECL
+ && DECL_C_BIT_FIELD (TREE_OPERAND (ref, 1)))
+ {
+ /* Clear the ordinary bit. If this object was a class
+ rvalue we want to preserve that information. */
+ op1_lvalue_kind &= ~clk_ordinary;
+	  /* The lvalue is for a bitfield.  */
+ op1_lvalue_kind |= clk_bitfield;
+ }
+ return op1_lvalue_kind;
+
case STRING_CST:
- return 1;
+ return clk_ordinary;
case VAR_DECL:
if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
&& DECL_LANG_SPECIFIC (ref)
&& DECL_IN_AGGR_P (ref))
- return 0;
+ return clk_none;
case INDIRECT_REF:
case ARRAY_REF:
case PARM_DECL:
case RESULT_DECL:
if (TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
- return 1;
+ return clk_ordinary;
break;
/* A currently unresolved scope ref. */
my_friendly_abort (103);
case OFFSET_REF:
if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
- return 1;
- return (lvalue_p_1 (TREE_OPERAND (ref, 0),
- treat_class_rvalues_as_lvalues)
- && lvalue_p_1 (TREE_OPERAND (ref, 1),
- treat_class_rvalues_as_lvalues));
+ return clk_ordinary;
+ /* Fall through. */
+ case MAX_EXPR:
+ case MIN_EXPR:
+ op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0),
+ treat_class_rvalues_as_lvalues);
+ op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
+ treat_class_rvalues_as_lvalues);
break;
case COND_EXPR:
- return (lvalue_p_1 (TREE_OPERAND (ref, 1),
- treat_class_rvalues_as_lvalues)
- && lvalue_p_1 (TREE_OPERAND (ref, 2),
- treat_class_rvalues_as_lvalues));
+ op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1),
+ treat_class_rvalues_as_lvalues);
+ op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 2),
+ treat_class_rvalues_as_lvalues);
+ break;
case MODIFY_EXPR:
- return 1;
+ return clk_ordinary;
case COMPOUND_EXPR:
return lvalue_p_1 (TREE_OPERAND (ref, 1),
- treat_class_rvalues_as_lvalues);
-
- case MAX_EXPR:
- case MIN_EXPR:
- return (lvalue_p_1 (TREE_OPERAND (ref, 0),
- treat_class_rvalues_as_lvalues)
- && lvalue_p_1 (TREE_OPERAND (ref, 1),
- treat_class_rvalues_as_lvalues));
+ treat_class_rvalues_as_lvalues);
case TARGET_EXPR:
- return treat_class_rvalues_as_lvalues;
+ return treat_class_rvalues_as_lvalues ? clk_class : clk_none;
case CALL_EXPR:
- return (treat_class_rvalues_as_lvalues
- && IS_AGGR_TYPE (TREE_TYPE (ref)));
+ case VA_ARG_EXPR:
+ return ((treat_class_rvalues_as_lvalues
+ && IS_AGGR_TYPE (TREE_TYPE (ref)))
+ ? clk_class : clk_none);
case FUNCTION_DECL:
/* All functions (except non-static-member functions) are
lvalues. */
- return !DECL_NONSTATIC_MEMBER_FUNCTION_P (ref);
+ return (DECL_NONSTATIC_MEMBER_FUNCTION_P (ref)
+ ? clk_none : clk_ordinary);
default:
break;
}
- return 0;
+ /* If one operand is not an lvalue at all, then this expression is
+ not an lvalue. */
+ if (!op1_lvalue_kind || !op2_lvalue_kind)
+ return clk_none;
+
+ /* Otherwise, it's an lvalue, and it has all the odd properties
+ contributed by either operand. */
+ op1_lvalue_kind = op1_lvalue_kind | op2_lvalue_kind;
+ /* It's not an ordinary lvalue if it involves either a bit-field or
+ a class rvalue. */
+ if ((op1_lvalue_kind & ~clk_ordinary) != clk_none)
+ op1_lvalue_kind &= ~clk_ordinary;
+ return op1_lvalue_kind;
}
-/* Return nonzero if REF is an lvalue valid for this language.
- Lvalues can be assigned, unless they have TREE_READONLY, or unless
- they are FUNCTION_DECLs. Lvalues can have their address taken,
- unless they have DECL_REGISTER. */
+/* If REF is an lvalue, returns the kind of lvalue that REF is.
+ Otherwise, returns clk_none. Lvalues can be assigned, unless they
+ have TREE_READONLY, or unless they are FUNCTION_DECLs. Lvalues can
+ have their address taken, unless they have DECL_REGISTER. */
-int
+cp_lvalue_kind
real_lvalue_p (ref)
tree ref;
{
return lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/0);
}
-/* This differs from real_lvalue_p in that class rvalues are considered
- lvalues. */
+/* This differs from real_lvalue_p in that class rvalues are
+ considered lvalues. */
int
lvalue_p (ref)
tree ref;
{
- return lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/1);
+ return
+ (lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/1) != clk_none);
}
/* Return nonzero if REF is an lvalue valid for this language;
return win;
}
+/* Build a TARGET_EXPR, initializing the DECL with the VALUE. */
+
+static tree
+build_target_expr (decl, value)
+ tree decl;
+ tree value;
+{
+ tree t;
+
+ t = build (TARGET_EXPR, TREE_TYPE (decl), decl, value,
+ maybe_build_cleanup (decl), NULL_TREE);
+ /* We always set TREE_SIDE_EFFECTS so that expand_expr does not
+ ignore the TARGET_EXPR. If there really turn out to be no
+ side-effects, then the optimizer should be able to get rid of
+ whatever code is generated anyhow. */
+ TREE_SIDE_EFFECTS (t) = 1;
+
+ return t;
+}
+
/* INIT is a CALL_EXPR which needs info about its target.
TYPE is the type that this initialization should appear to have.
tree type;
tree init;
{
+ tree fn;
tree slot;
tree rval;
+ /* Make sure that we're not trying to create an instance of an
+ abstract class. */
+ abstract_virtuals_error (NULL_TREE, type);
+
if (TREE_CODE (init) != CALL_EXPR && TREE_CODE (init) != AGGR_INIT_EXPR)
- return init;
+ return convert (type, init);
slot = build (VAR_DECL, type);
DECL_ARTIFICIAL (slot) = 1;
+ DECL_CONTEXT (slot) = current_function_decl;
layout_decl (slot, 0);
- rval = build (AGGR_INIT_EXPR, type,
- TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
- TREE_SIDE_EFFECTS (rval) = 1;
- rval = build (TARGET_EXPR, type, slot, rval, NULL_TREE, NULL_TREE);
+
+ /* We split the CALL_EXPR into its function and its arguments here.
+ Then, in expand_expr, we put them back together. The reason for
+ this is that this expression might be a default argument
+ expression. In that case, we need a new temporary every time the
+ expression is used. That's what break_out_target_exprs does; it
+ replaces every AGGR_INIT_EXPR with a copy that uses a fresh
+ temporary slot. Then, expand_expr builds up a call-expression
+ using the new slot. */
+ fn = TREE_OPERAND (init, 0);
+ rval = build (AGGR_INIT_EXPR, type, fn, TREE_OPERAND (init, 1), slot);
TREE_SIDE_EFFECTS (rval) = 1;
+ AGGR_INIT_VIA_CTOR_P (rval)
+ = (TREE_CODE (fn) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
+ && DECL_CONSTRUCTOR_P (TREE_OPERAND (fn, 0)));
+ rval = build_target_expr (slot, rval);
return rval;
}
-/* Encapsulate the expression INIT in a TARGET_EXPR. */
+/* Build a TARGET_EXPR using INIT to initialize a new temporary of the
+ indicated TYPE. */
tree
-get_target_expr (init)
+build_target_expr_with_type (init, type)
tree init;
+ tree type;
{
tree slot;
tree rval;
- slot = build (VAR_DECL, TREE_TYPE (init));
+ slot = build (VAR_DECL, type);
DECL_ARTIFICIAL (slot) = 1;
+ DECL_CONTEXT (slot) = current_function_decl;
layout_decl (slot, 0);
- rval = build (TARGET_EXPR, TREE_TYPE (init), slot, init,
- NULL_TREE, NULL_TREE);
- TREE_SIDE_EFFECTS (rval) = 1;
+ rval = build_target_expr (slot, init);
return rval;
}
+/* Like build_target_expr_with_type, but use the type of INIT. */
+
+tree
+get_target_expr (init)
+ tree init;
+{
+ return build_target_expr_with_type (init, TREE_TYPE (init));
+}
+
/* Recursively search EXP for CALL_EXPRs that need cleanups and replace
these CALL_EXPRs with tree nodes that will perform the cleanups. */
}
\f
-extern struct obstack *current_obstack;
-extern struct obstack permanent_obstack, class_obstack;
-extern struct obstack *saveable_obstack;
-extern struct obstack *expression_obstack;
+extern struct obstack permanent_obstack;
/* Here is how primitive or already-canonicalized types' hash
codes are made. MUST BE CONSISTENT WITH tree.c !!! */
TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
TREE_TYPE (t) = rettype;
- if (IS_SIGNATURE (basetype))
- ptype = build_signature_pointer_type (basetype);
- else
- ptype = build_pointer_type (basetype);
+ ptype = build_pointer_type (basetype);
/* The actual arglist for this function includes a "hidden" argument
- which is "this". Put it into the list of argument types. */
-
+ which is "this". Put it into the list of argument types. Make
+ sure that the new argument list is allocated on the same obstack
+ as the type. */
argtypes = tree_cons (NULL_TREE, ptype, argtypes);
TYPE_ARG_TYPES (t) = argtypes;
TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
/* If we already have such a type, use the old one and free this one.
Note that it also frees up the above cons cell if found. */
- hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
+ hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) +
+ type_hash_list (argtypes);
+
t = type_hash_canon (hashcode, t);
if (TYPE_SIZE (t) == 0)
tree elt_type;
tree index_type;
{
- register struct obstack *ambient_obstack = current_obstack;
- register struct obstack *ambient_saveable_obstack = saveable_obstack;
tree t;
- /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
- make this permanent too. */
- if (TREE_PERMANENT (elt_type)
- && (index_type == 0 || TREE_PERMANENT (index_type)))
- {
- current_obstack = &permanent_obstack;
- saveable_obstack = &permanent_obstack;
- }
+ if (elt_type == error_mark_node || index_type == error_mark_node)
+ return error_mark_node;
if (processing_template_decl
|| uses_template_parms (elt_type)
/* Push these needs up so that initialization takes place
more easily. */
- TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
- TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
- current_obstack = ambient_obstack;
- saveable_obstack = ambient_saveable_obstack;
+ TYPE_NEEDS_CONSTRUCTING (t)
+ = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
+ TYPE_NEEDS_DESTRUCTOR (t)
+ = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
return t;
}
return t;
}
\f
-/* Make a variant type in the proper way for C/C++, propagating qualifiers
- down to the element type of an array. */
+/* Make a variant of TYPE, qualified with the TYPE_QUALS. Handles
+ arrays correctly. In particular, if TYPE is an array of T's, and
+ TYPE_QUALS is non-empty, returns an array of qualified T's. If
+   an attempt is made to qualify a type illegally, and COMPLAIN is
+ non-zero, an error is issued. If COMPLAIN is zero, error_mark_node
+ is returned. */
tree
-cp_build_qualified_type (type, type_quals)
+cp_build_qualified_type_real (type, type_quals, complain)
tree type;
int type_quals;
+ int complain;
{
+ tree result;
+
if (type == error_mark_node)
return type;
-
+
+ if (type_quals == TYPE_QUALS (type))
+ return type;
+
/* A restrict-qualified pointer type must be a pointer (or reference)
to object or incomplete type. */
if ((type_quals & TYPE_QUAL_RESTRICT)
+ && TREE_CODE (type) != TEMPLATE_TYPE_PARM
&& (!POINTER_TYPE_P (type)
|| TYPE_PTRMEM_P (type)
|| TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE))
{
- cp_error ("`%T' cannot be `restrict'-qualified", type);
+ if (complain)
+ cp_error ("`%T' cannot be `restrict'-qualified", type);
+ else
+ return error_mark_node;
+
type_quals &= ~TYPE_QUAL_RESTRICT;
}
- if (TREE_CODE (type) == ARRAY_TYPE)
+ if (type_quals != TYPE_UNQUALIFIED
+ && TREE_CODE (type) == FUNCTION_TYPE)
+ {
+ if (complain)
+ cp_error ("`%T' cannot be `const'-, `volatile'-, or `restrict'-qualified", type);
+ else
+ return error_mark_node;
+ type_quals = TYPE_UNQUALIFIED;
+ }
+ else if (TREE_CODE (type) == ARRAY_TYPE)
{
- tree real_main_variant = TYPE_MAIN_VARIANT (type);
+ /* In C++, the qualification really applies to the array element
+ type. Obtain the appropriately qualified element type. */
+ tree t;
+ tree element_type
+ = cp_build_qualified_type_real (TREE_TYPE (type),
+ type_quals,
+ complain);
- push_obstacks (TYPE_OBSTACK (real_main_variant),
- TYPE_OBSTACK (real_main_variant));
- type = build_cplus_array_type_1 (cp_build_qualified_type
- (TREE_TYPE (type), type_quals),
- TYPE_DOMAIN (type));
+ if (element_type == error_mark_node)
+ return error_mark_node;
- /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
- make a copy. (TYPE might have come from the hash table and
- REAL_MAIN_VARIANT might be in some function's obstack.) */
+ /* See if we already have an identically qualified type. */
+ for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
+ if (CP_TYPE_QUALS (t) == type_quals)
+ break;
- if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
+ /* If we didn't already have it, create it now. */
+ if (!t)
{
- type = copy_node (type);
- TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
+ /* Make a new array type, just like the old one, but with the
+ appropriately qualified element type. */
+ t = build_type_copy (type);
+ TREE_TYPE (t) = element_type;
}
- TYPE_MAIN_VARIANT (type) = real_main_variant;
- pop_obstacks ();
- return type;
+ /* Even if we already had this variant, we update
+ TYPE_NEEDS_CONSTRUCTING and TYPE_NEEDS_DESTRUCTOR in case
+ they changed since the variant was originally created.
+
+ This seems hokey; if there is some way to use a previous
+ variant *without* coming through here,
+ TYPE_NEEDS_CONSTRUCTING will never be updated. */
+ TYPE_NEEDS_CONSTRUCTING (t)
+ = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (element_type));
+ TYPE_NEEDS_DESTRUCTOR (t)
+ = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (element_type));
+ return t;
+ }
+ else if (TYPE_PTRMEMFUNC_P (type))
+ {
+ /* For a pointer-to-member type, we can't just return a
+ cv-qualified version of the RECORD_TYPE. If we do, we
+      haven't changed the field that contains the actual pointer to
+ a method, and so TYPE_PTRMEMFUNC_FN_TYPE will be wrong. */
+ tree t;
+
+ t = TYPE_PTRMEMFUNC_FN_TYPE (type);
+ t = cp_build_qualified_type_real (t, type_quals, complain);
+ return build_ptrmemfunc_type (t);
}
- return build_qualified_type (type, type_quals);
+
+ /* Retrieve (or create) the appropriately qualified variant. */
+ result = build_qualified_type (type, type_quals);
+
+ /* If this was a pointer-to-method type, and we just made a copy,
+ then we need to clear the cached associated
+ pointer-to-member-function type; it is not valid for the new
+ type. */
+ if (result != type
+ && TREE_CODE (type) == POINTER_TYPE
+ && TREE_CODE (TREE_TYPE (type)) == METHOD_TYPE)
+ TYPE_SET_PTRMEMFUNC_TYPE (result, NULL_TREE);
+
+ return result;
}
/* Returns the canonical version of TYPE. In other words, if TYPE is
return cp_build_qualified_type (TYPE_MAIN_VARIANT (t), CP_TYPE_QUALS (t));
}
\f
-/* Add OFFSET to all base types of T.
-
- OFFSET, which is a type offset, is number of bytes.
-
- Note that we don't have to worry about having two paths to the
- same base type, since this type owns its association list. */
-
-static void
-propagate_binfo_offsets (binfo, offset)
- tree binfo;
- tree offset;
-{
- tree binfos = BINFO_BASETYPES (binfo);
- int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
-
- for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
- {
- tree base_binfo = TREE_VEC_ELT (binfos, i);
-
- if (TREE_VIA_VIRTUAL (base_binfo))
- i += 1;
- else
- {
- int j;
- tree delta = NULL_TREE;
-
- for (j = i+1; j < n_baselinks; j++)
- if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
- {
- /* The next basetype offset must take into account the space
- between the classes, not just the size of each class. */
- delta = size_binop (MINUS_EXPR,
- BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
- BINFO_OFFSET (base_binfo));
- break;
- }
-
-#if 0
- if (BINFO_OFFSET_ZEROP (base_binfo))
- BINFO_OFFSET (base_binfo) = offset;
- else
- BINFO_OFFSET (base_binfo)
- = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
-#else
- BINFO_OFFSET (base_binfo) = offset;
-#endif
-
- propagate_binfo_offsets (base_binfo, offset);
-
- /* Go to our next class that counts for offset propagation. */
- i = j;
- if (i < n_baselinks)
- offset = size_binop (PLUS_EXPR, offset, delta);
- }
- }
-}
-
/* Makes new binfos for the indirect bases under BINFO, and updates
BINFO_OFFSET for them and their bases. */
}
}
-/* Finish the work of layout_record, now taking virtual bases into account.
- Also compute the actual offsets that our base classes will have.
- This must be performed after the fields are laid out, since virtual
- baseclasses must lay down at the end of the record.
-
- Returns the maximum number of virtual functions any of the
- baseclasses provide. */
-
-int
-layout_basetypes (rec, max)
- tree rec;
- int max;
-{
- tree binfos = TYPE_BINFO_BASETYPES (rec);
- int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
-
- tree vbase_types;
-
- unsigned int record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
- unsigned int desired_align;
-
- /* Record size so far is CONST_SIZE bits, where CONST_SIZE is an integer. */
- register unsigned int const_size = 0;
- unsigned int nonvirtual_const_size;
-
-#ifdef STRUCTURE_SIZE_BOUNDARY
- /* Packed structures don't need to have minimum size. */
- if (! TYPE_PACKED (rec))
- record_align = MAX (record_align, STRUCTURE_SIZE_BOUNDARY);
-#endif
-
- /* Get all the virtual base types that this type uses. The
- TREE_VALUE slot holds the virtual baseclass type. Note that
- get_vbase_types makes copies of the virtual base BINFOs, so that
- the vbase_types are unshared. */
- vbase_types = CLASSTYPE_VBASECLASSES (rec);
-
- my_friendly_assert (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST, 19970302);
- const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
-
- nonvirtual_const_size = const_size;
-
- while (vbase_types)
- {
- tree basetype = BINFO_TYPE (vbase_types);
- tree offset;
-
- desired_align = TYPE_ALIGN (basetype);
- record_align = MAX (record_align, desired_align);
-
- if (const_size == 0)
- offset = integer_zero_node;
- else
- {
- /* Give each virtual base type the alignment it wants. */
- const_size = CEIL (const_size, desired_align) * desired_align;
- offset = size_int (CEIL (const_size, BITS_PER_UNIT));
- }
-
- if (CLASSTYPE_VSIZE (basetype) > max)
- max = CLASSTYPE_VSIZE (basetype);
- BINFO_OFFSET (vbase_types) = offset;
-
- /* Every virtual baseclass takes a least a UNIT, so that we can
- take it's address and get something different for each base. */
- const_size += MAX (BITS_PER_UNIT,
- TREE_INT_CST_LOW (CLASSTYPE_SIZE (basetype)));
-
- vbase_types = TREE_CHAIN (vbase_types);
- }
-
- if (const_size)
- {
- /* Because a virtual base might take a single byte above,
- we have to re-adjust the total size to make sure it is
- a multiple of the alignment. */
- /* Give the whole object the alignment it wants. */
- const_size = CEIL (const_size, record_align) * record_align;
- }
-
- /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
- here, as that is for this class, without any virtual base classes. */
- TYPE_ALIGN (rec) = record_align;
- if (const_size != nonvirtual_const_size)
- {
- TYPE_SIZE (rec) = size_int (const_size);
- TYPE_SIZE_UNIT (rec) = size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (rec),
- size_int (BITS_PER_UNIT));
- }
-
- /* Now propagate offset information throughout the lattice. */
- for (i = 0; i < n_baseclasses; i++)
- {
- register tree base_binfo = TREE_VEC_ELT (binfos, i);
- register tree basetype = BINFO_TYPE (base_binfo);
- tree field = TYPE_FIELDS (rec);
-
- if (TREE_VIA_VIRTUAL (base_binfo))
- continue;
-
- my_friendly_assert (TREE_TYPE (field) == basetype, 23897);
-
- if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
- cp_warning ("direct base `%T' inaccessible in `%T' due to ambiguity",
- basetype, rec);
-
- BINFO_OFFSET (base_binfo)
- = size_int (CEIL (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field)),
- BITS_PER_UNIT));
- propagate_binfo_offsets (base_binfo, BINFO_OFFSET (base_binfo));
- TYPE_FIELDS (rec) = TREE_CHAIN (field);
- }
-
- for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
- vbase_types = TREE_CHAIN (vbase_types))
- {
- BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
- unshare_base_binfos (vbase_types);
- propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
-
- if (extra_warnings)
- {
- tree basetype = BINFO_TYPE (vbase_types);
- if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
- cp_warning ("virtual base `%T' inaccessible in `%T' due to ambiguity",
- basetype, rec);
- }
- }
-
- return max;
-}
-
-/* If the empty base field in DECL overlaps with a base of the same type in
- NEWDECL, which is either another base field or the first data field of
- the class, pad the base just before NEWDECL and return 1. Otherwise,
- return 0. */
-
-static int
-avoid_overlap (decl, newdecl)
- tree decl, newdecl;
-{
- tree field;
-
- if (newdecl == NULL_TREE
- || ! types_overlap_p (TREE_TYPE (decl), TREE_TYPE (newdecl)))
- return 0;
-
- for (field = decl; TREE_CHAIN (field) && TREE_CHAIN (field) != newdecl;
- field = TREE_CHAIN (field))
- ;
-
- DECL_SIZE (field) = integer_one_node;
-
- return 1;
-}
-
-/* Returns a list of fields to stand in for the base class subobjects
- of REC. These fields are later removed by layout_basetypes. */
-
-tree
-build_base_fields (rec)
- tree rec;
-{
- /* Chain to hold all the new FIELD_DECLs which stand in for base class
- subobjects. */
- tree base_decls = NULL_TREE;
- tree binfos = TYPE_BINFO_BASETYPES (rec);
- int n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
- tree decl, nextdecl;
- int i, saw_empty = 0;
- unsigned int base_align = 0;
-
- for (i = 0; i < n_baseclasses; ++i)
- {
- register tree base_binfo = TREE_VEC_ELT (binfos, i);
- register tree basetype = BINFO_TYPE (base_binfo);
-
- if (TYPE_SIZE (basetype) == 0)
- /* This error is now reported in xref_tag, thus giving better
- location information. */
- continue;
-
- if (TREE_VIA_VIRTUAL (base_binfo))
- continue;
-
- decl = build_lang_field_decl (FIELD_DECL, NULL_TREE, basetype);
- DECL_ARTIFICIAL (decl) = 1;
- DECL_FIELD_CONTEXT (decl) = DECL_CLASS_CONTEXT (decl) = rec;
- DECL_SIZE (decl) = CLASSTYPE_SIZE (basetype);
- DECL_ALIGN (decl) = CLASSTYPE_ALIGN (basetype);
- TREE_CHAIN (decl) = base_decls;
- base_decls = decl;
-
- if (! flag_new_abi)
- {
- /* Brain damage for backwards compatibility. For no good reason,
- the old layout_basetypes made every base at least as large as
- the alignment for the bases up to that point, gratuitously
- wasting space. So we do the same thing here. */
- base_align = MAX (base_align, DECL_ALIGN (decl));
- DECL_SIZE (decl)
- = size_int (MAX (TREE_INT_CST_LOW (DECL_SIZE (decl)),
- (int) base_align));
- }
- else if (DECL_SIZE (decl) == integer_zero_node)
- saw_empty = 1;
- }
-
- /* Reverse the list of fields so we allocate the bases in the proper
- order. */
- base_decls = nreverse (base_decls);
-
- /* In the presence of empty base classes, we run the risk of allocating
- two objects of the same class on top of one another. Avoid that. */
- if (flag_new_abi && saw_empty)
- for (decl = base_decls; decl; decl = TREE_CHAIN (decl))
- {
- if (DECL_SIZE (decl) == integer_zero_node)
- {
- /* First step through the following bases until we find
- an overlap or a non-empty base. */
- for (nextdecl = TREE_CHAIN (decl); nextdecl;
- nextdecl = TREE_CHAIN (nextdecl))
- {
- if (avoid_overlap (decl, nextdecl)
- || DECL_SIZE (nextdecl) != integer_zero_node)
- goto nextbase;
- }
-
- /* If we're still looking, also check against the first
- field. */
- for (nextdecl = TYPE_FIELDS (rec);
- nextdecl && TREE_CODE (nextdecl) != FIELD_DECL;
- nextdecl = TREE_CHAIN (nextdecl))
- /* keep looking */;
- avoid_overlap (decl, nextdecl);
- }
- nextbase:;
- }
-
- return base_decls;
-}
-
-/* Returns list of virtual base class pointers in a FIELD_DECL chain. */
-
-tree
-build_vbase_pointer_fields (rec)
- tree rec;
-{
- /* Chain to hold all the new FIELD_DECLs which point at virtual
- base classes. */
- tree vbase_decls = NULL_TREE;
- tree binfos = TYPE_BINFO_BASETYPES (rec);
- int n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
- tree decl;
- int i;
-
- /* Handle basetypes almost like fields, but record their
- offsets differently. */
-
- for (i = 0; i < n_baseclasses; i++)
- {
- register tree base_binfo = TREE_VEC_ELT (binfos, i);
- register tree basetype = BINFO_TYPE (base_binfo);
-
- if (TYPE_SIZE (basetype) == 0)
- /* This error is now reported in xref_tag, thus giving better
- location information. */
- continue;
-
- /* All basetypes are recorded in the association list of the
- derived type. */
-
- if (TREE_VIA_VIRTUAL (base_binfo))
- {
- int j;
- const char *name;
-
- /* The offset for a virtual base class is only used in computing
- virtual function tables and for initializing virtual base
- pointers. It is built once `get_vbase_types' is called. */
-
- /* If this basetype can come from another vbase pointer
- without an additional indirection, we will share
- that pointer. If an indirection is involved, we
- make our own pointer. */
- for (j = 0; j < n_baseclasses; j++)
- {
- tree other_base_binfo = TREE_VEC_ELT (binfos, j);
- if (! TREE_VIA_VIRTUAL (other_base_binfo)
- && binfo_member (basetype,
- CLASSTYPE_VBASECLASSES (BINFO_TYPE
- (other_base_binfo))
- ))
- goto got_it;
- }
- FORMAT_VBASE_NAME (name, basetype);
- decl = build_lang_field_decl (FIELD_DECL, get_identifier (name),
- build_pointer_type (basetype));
- /* If you change any of the below, take a look at all the
- other VFIELD_BASEs and VTABLE_BASEs in the code, and change
- them too. */
- DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
- DECL_VIRTUAL_P (decl) = 1;
- DECL_ARTIFICIAL (decl) = 1;
- DECL_FIELD_CONTEXT (decl) = rec;
- DECL_CLASS_CONTEXT (decl) = rec;
- DECL_FCONTEXT (decl) = basetype;
- DECL_SAVED_INSNS (decl) = NULL_RTX;
- DECL_FIELD_SIZE (decl) = 0;
- DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
- TREE_CHAIN (decl) = vbase_decls;
- BINFO_VPTR_FIELD (base_binfo) = decl;
- vbase_decls = decl;
-
- got_it:
- /* The space this decl occupies has already been accounted for. */
- ;
- }
- }
-
- return vbase_decls;
-}
\f
/* Hashing of lists so that we don't make duplicates.
The entry point is `list_hash_canon'. */
{
register struct list_hash *h;
- h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
+ h = (struct list_hash *) obstack_alloc (&permanent_obstack, sizeof (struct list_hash));
h->hashcode = hashcode;
h->list = list;
h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
hash_tree_cons (purpose, value, chain)
tree purpose, value, chain;
{
- struct obstack *ambient_obstack = current_obstack;
tree t;
int hashcode = 0;
return t;
}
- current_obstack = &class_obstack;
-
t = tree_cons (purpose, value, chain);
/* If this is a new list, record it for later reuse. */
if (! debug_no_list_hash)
list_hash_add (hashcode, t);
- current_obstack = ambient_obstack;
return t;
}
BINFO_OFFSET (new_binfo) = offset;
BINFO_VTABLE (new_binfo) = vtable;
BINFO_VIRTUALS (new_binfo) = virtuals;
- BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
tree path;
{
register tree prev = NULL_TREE, cur;
- push_expression_obstack ();
for (cur = path; cur; cur = BINFO_INHERITANCE_CHAIN (cur))
{
tree r = copy_node (cur);
BINFO_INHERITANCE_CHAIN (r) = prev;
prev = r;
}
- pop_obstacks ();
return prev;
}
else
fprintf (stderr, "no vtable decl yet\n");
fprintf (stderr, "virtuals:\n");
- virtuals = BINFO_VIRTUALS (elem);
-
- n = skip_rtti_stuff (&virtuals, BINFO_TYPE (elem));
+ virtuals = skip_rtti_stuff (elem, BINFO_TYPE (elem), &n);
while (virtuals)
{
- tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
+ tree fndecl = TREE_VALUE (virtuals);
fprintf (stderr, "%s [%ld =? %ld]\n",
IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
(long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
}
}
-/* Initialize an CPLUS_BINDING node that does not live on an obstack. */
-
-tree
-binding_init (node)
- struct tree_binding* node;
-{
- static struct tree_binding* source;
- if (!source)
- {
- extern struct obstack permanent_obstack;
- push_obstacks (&permanent_obstack, &permanent_obstack);
- source = (struct tree_binding*)make_node (CPLUS_BINDING);
- pop_obstacks ();
- }
- *node = *source;
- TREE_PERMANENT ((tree)node) = 0;
- return (tree)node;
-}
-
int
count_functions (t)
tree t;
return result;
}
-/* Same as ovl_cons, but on the scratch_obstack. */
-
-tree
-scratch_ovl_cons (value, chain)
- tree value, chain;
-{
- register tree node;
- register struct obstack *ambient_obstack = current_obstack;
- extern struct obstack *expression_obstack;
- current_obstack = expression_obstack;
- node = ovl_cons (value, chain);
- current_obstack = ambient_obstack;
- return node;
-}
-
/* Build a new overloaded function. If this is the first one,
just return it; otherwise, ovl_cons the _DECLs */
return ovl_cons (decl, chain);
}
-/* Returns true iff functions are equivalent. Equivalent functions are
- not identical only if one is a function-local extern function.
- This assumes that function-locals don't have TREE_PERMANENT. */
-
-static int
-equal_functions (fn1, fn2)
- tree fn1;
- tree fn2;
-{
- if (!TREE_PERMANENT (fn1) || !TREE_PERMANENT (fn2))
- return decls_match (fn1, fn2);
- return fn1 == fn2;
-}
-
/* True if fn is in ovl. */
int
if (ovl == NULL_TREE)
return 0;
if (TREE_CODE (ovl) != OVERLOAD)
- return equal_functions (ovl, fn);
+ return ovl == fn;
for (; ovl; ovl = OVL_CHAIN (ovl))
- if (equal_functions (OVL_FUNCTION (ovl), fn))
+ if (OVL_FUNCTION (ovl) == fn)
return 1;
return 0;
}
return 0;
return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
}
+
+/* Returns non-zero if CODE is the code for a statement. */
+
+static int
+statement_code_p (code)
+ enum tree_code code;
+{
+ switch (code)
+ {
+ case EXPR_STMT:
+ case COMPOUND_STMT:
+ case DECL_STMT:
+ case IF_STMT:
+ case FOR_STMT:
+ case WHILE_STMT:
+ case DO_STMT:
+ case RETURN_STMT:
+ case BREAK_STMT:
+ case CONTINUE_STMT:
+ case SWITCH_STMT:
+ case GOTO_STMT:
+ case LABEL_STMT:
+ case ASM_STMT:
+ case SUBOBJECT:
+ case CLEANUP_STMT:
+ case START_CATCH_STMT:
+ case CTOR_STMT:
+ case SCOPE_STMT:
+ case CTOR_INITIALIZER:
+ case CASE_LABEL:
+ case RETURN_INIT:
+ case TRY_BLOCK:
+ case HANDLER:
+ return 1;
+
+ default:
+ return 0;
+ }
+}
\f
#define PRINT_RING_SIZE 4
-char *
+const char *
lang_printable_name (decl, v)
tree decl;
int v;
int type_quals = TYPE_QUALS (type);
for (; v; v = TYPE_NEXT_VARIANT (v))
- {
- tree t;
- tree u;
-
- if (TYPE_QUALS (v) != type_quals)
- continue;
-
- for (t = TYPE_RAISES_EXCEPTIONS (v), u = raises;
- t != NULL_TREE && u != NULL_TREE;
- t = TREE_CHAIN (t), u = TREE_CHAIN (u))
- if (((TREE_VALUE (t) != NULL_TREE)
- != (TREE_VALUE (u) != NULL_TREE))
- || !same_type_p (TREE_VALUE (t), TREE_VALUE (u)))
- break;
-
- if (!t && !u)
- /* There's a memory leak here; RAISES is not freed. */
- return v;
- }
+ if (TYPE_QUALS (v) == type_quals
+ && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (v), 1))
+ return v;
/* Need to build a new variant. */
v = build_type_copy (type);
-
- if (raises && ! TREE_PERMANENT (raises))
- raises = copy_to_permanent (raises);
-
TYPE_RAISES_EXCEPTIONS (v) = raises;
return v;
}
tree template = TYPE_NAME (t);
tree t2;
- /* Make sure these end up on the permanent_obstack. */
- push_obstacks_nochange ();
- end_temporary_allocation ();
-
- t2 = make_lang_type (TEMPLATE_TEMPLATE_PARM);
+ t2 = make_aggr_type (TEMPLATE_TEMPLATE_PARM);
template = copy_node (template);
copy_lang_decl (template);
- pop_obstacks ();
-
TREE_TYPE (template) = t2;
TYPE_NAME (t2) = template;
TYPE_STUB_DECL (t2) = template;
return t2;
}
-/* Walk through the tree structure T, applying func. If func ever returns
- non-null, return that value. */
+/* Apply FUNC to all the sub-trees of TP in a pre-order traversal.
+ FUNC is called with the DATA and the address of each sub-tree. If
+ FUNC returns a non-NULL value, the traversal is aborted, and the
+ value returned by FUNC is returned. */
-tree
-search_tree (t, func)
- tree t;
- tree (*func) PROTO((tree));
+tree
+walk_tree (tp, func, data)
+ tree *tp;
+ walk_tree_fn func;
+ void *data;
{
-#define TRY(ARG) if (tmp=search_tree (ARG, func), tmp != NULL_TREE) return tmp
+ enum tree_code code;
+ int walk_subtrees;
+ tree result;
+
+#define WALK_SUBTREE(NODE) \
+ do \
+ { \
+ result = walk_tree (&(NODE), func, data); \
+ if (result) \
+ return result; \
+ } \
+ while (0)
+
+ /* Skip empty subtrees. */
+ if (!*tp)
+ return NULL_TREE;
+
+ /* Call the function. */
+ walk_subtrees = 1;
+ result = (*func) (tp, &walk_subtrees, data);
+
+ /* If we found something, return it. */
+ if (result)
+ return result;
+
+ /* Even if we didn't, FUNC may have decided that there was nothing
+ interesting below this point in the tree. */
+ if (!walk_subtrees)
+ return NULL_TREE;
+
+ code = TREE_CODE (*tp);
+
+ /* Handle common cases up front. */
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+ || TREE_CODE_CLASS (code) == 'r'
+ || TREE_CODE_CLASS (code) == 's')
+ {
+ int i, len;
+
+ /* Walk over all the sub-trees of this operand. */
+ len = first_rtl_op (code);
+ /* TARGET_EXPRs are peculiar: operands 1 and 3 can be the same.
+ But, we only want to walk once. */
+ if (code == TARGET_EXPR
+ && TREE_OPERAND (*tp, 3) == TREE_OPERAND (*tp, 1))
+ --len;
+ /* Go through the subtrees. We need to do this in forward order so
+ that the scope of a FOR_EXPR is handled properly. */
+ for (i = 0; i < len; ++i)
+ WALK_SUBTREE (TREE_OPERAND (*tp, i));
+
+ /* For statements, we also walk the chain so that we cover the
+ entire statement tree. */
+ if (statement_code_p (code))
+ {
+ if (code == DECL_STMT
+ && DECL_STMT_DECL (*tp)
+ && TREE_CODE_CLASS (TREE_CODE (DECL_STMT_DECL (*tp))) == 'd')
+ {
+ /* Walk the DECL_INITIAL and DECL_SIZE. We don't want to walk
+ into declarations that are just mentioned, rather than
+ declared; they don't really belong to this part of the tree.
+ And, we can see cycles: the initializer for a declaration can
+ refer to the declaration itself. */
+ WALK_SUBTREE (DECL_INITIAL (DECL_STMT_DECL (*tp)));
+ WALK_SUBTREE (DECL_SIZE (DECL_STMT_DECL (*tp)));
+ }
- tree tmp;
+ WALK_SUBTREE (TREE_CHAIN (*tp));
+ }
- if (t == NULL_TREE)
- return t;
+ /* We didn't find what we were looking for. */
+ return NULL_TREE;
+ }
+ else if (TREE_CODE_CLASS (code) == 'd')
+ {
+ WALK_SUBTREE (TREE_TYPE (*tp));
- if (tmp = func (t), tmp != NULL_TREE)
- return tmp;
+ /* We didn't find what we were looking for. */
+ return NULL_TREE;
+ }
- switch (TREE_CODE (t))
+ /* Not one of the easy cases. We must explicitly go through the
+ children. */
+ switch (code)
{
case ERROR_MARK:
- break;
-
case IDENTIFIER_NODE:
+ case INTEGER_CST:
+ case REAL_CST:
+ case STRING_CST:
+ case DEFAULT_ARG:
+ case TEMPLATE_TEMPLATE_PARM:
+ case TEMPLATE_PARM_INDEX:
+ case TEMPLATE_TYPE_PARM:
+ case REAL_TYPE:
+ case COMPLEX_TYPE:
+ case VOID_TYPE:
+ case BOOLEAN_TYPE:
+ case TYPENAME_TYPE:
+ case UNION_TYPE:
+ case ENUMERAL_TYPE:
+ case TYPEOF_TYPE:
+ case BLOCK:
+ /* None of these have subtrees other than those already walked
+ above. */
break;
- case VAR_DECL:
- case FUNCTION_DECL:
- case CONST_DECL:
- case TEMPLATE_DECL:
- case NAMESPACE_DECL:
- break;
-
- case TYPE_DECL:
- TRY (TREE_TYPE (t));
+ case PTRMEM_CST:
+ WALK_SUBTREE (TREE_TYPE (*tp));
break;
- case PARM_DECL:
- TRY (TREE_TYPE (t));
- TRY (TREE_CHAIN (t));
+ case POINTER_TYPE:
+ case REFERENCE_TYPE:
+ WALK_SUBTREE (TREE_TYPE (*tp));
break;
case TREE_LIST:
- TRY (TREE_PURPOSE (t));
- TRY (TREE_VALUE (t));
- TRY (TREE_CHAIN (t));
+ WALK_SUBTREE (TREE_PURPOSE (*tp));
+ WALK_SUBTREE (TREE_VALUE (*tp));
+ WALK_SUBTREE (TREE_CHAIN (*tp));
break;
case OVERLOAD:
- TRY (OVL_FUNCTION (t));
- TRY (OVL_CHAIN (t));
+ WALK_SUBTREE (OVL_FUNCTION (*tp));
+ WALK_SUBTREE (OVL_CHAIN (*tp));
break;
case TREE_VEC:
{
- int len = TREE_VEC_LENGTH (t);
-
- t = copy_node (t);
+ int len = TREE_VEC_LENGTH (*tp);
while (len--)
- TRY (TREE_VEC_ELT (t, len));
+ WALK_SUBTREE (TREE_VEC_ELT (*tp, len));
}
break;
- case INTEGER_CST:
- case REAL_CST:
- case STRING_CST:
- case DEFAULT_ARG:
- break;
-
- case PTRMEM_CST:
- TRY (TREE_TYPE (t));
- break;
-
- case COND_EXPR:
- case TARGET_EXPR:
- case AGGR_INIT_EXPR:
- case NEW_EXPR:
- TRY (TREE_OPERAND (t, 0));
- TRY (TREE_OPERAND (t, 1));
- TRY (TREE_OPERAND (t, 2));
- break;
-
- case MODIFY_EXPR:
- case PLUS_EXPR:
- case MINUS_EXPR:
- case MULT_EXPR:
- case TRUNC_DIV_EXPR:
- case TRUNC_MOD_EXPR:
- case MIN_EXPR:
- case MAX_EXPR:
- case LSHIFT_EXPR:
- case RSHIFT_EXPR:
- case BIT_IOR_EXPR:
- case BIT_XOR_EXPR:
- case BIT_AND_EXPR:
- case BIT_ANDTC_EXPR:
- case TRUTH_ANDIF_EXPR:
- case TRUTH_ORIF_EXPR:
- case LT_EXPR:
- case LE_EXPR:
- case GT_EXPR:
- case GE_EXPR:
- case EQ_EXPR:
- case NE_EXPR:
- case CEIL_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case ROUND_DIV_EXPR:
- case CEIL_MOD_EXPR:
- case FLOOR_MOD_EXPR:
- case ROUND_MOD_EXPR:
- case COMPOUND_EXPR:
- case PREDECREMENT_EXPR:
- case PREINCREMENT_EXPR:
- case POSTDECREMENT_EXPR:
- case POSTINCREMENT_EXPR:
- case ARRAY_REF:
- case SCOPE_REF:
- case TRY_CATCH_EXPR:
- case WITH_CLEANUP_EXPR:
- case CALL_EXPR:
- TRY (TREE_OPERAND (t, 0));
- TRY (TREE_OPERAND (t, 1));
- break;
-
- case SAVE_EXPR:
- case CONVERT_EXPR:
- case ADDR_EXPR:
- case INDIRECT_REF:
- case NEGATE_EXPR:
- case BIT_NOT_EXPR:
- case TRUTH_NOT_EXPR:
- case NOP_EXPR:
- case NON_LVALUE_EXPR:
- case COMPONENT_REF:
- case CLEANUP_POINT_EXPR:
- case LOOKUP_EXPR:
- case SIZEOF_EXPR:
- case ALIGNOF_EXPR:
- TRY (TREE_OPERAND (t, 0));
- break;
-
- case MODOP_EXPR:
- case CAST_EXPR:
- case REINTERPRET_CAST_EXPR:
- case CONST_CAST_EXPR:
- case STATIC_CAST_EXPR:
- case DYNAMIC_CAST_EXPR:
- case ARROW_EXPR:
- case DOTSTAR_EXPR:
- case TYPEID_EXPR:
- break;
-
case COMPLEX_CST:
- TRY (TREE_REALPART (t));
- TRY (TREE_IMAGPART (t));
+ WALK_SUBTREE (TREE_REALPART (*tp));
+ WALK_SUBTREE (TREE_IMAGPART (*tp));
break;
case CONSTRUCTOR:
- TRY (CONSTRUCTOR_ELTS (t));
+ WALK_SUBTREE (CONSTRUCTOR_ELTS (*tp));
break;
- case TEMPLATE_TEMPLATE_PARM:
- case TEMPLATE_PARM_INDEX:
- case TEMPLATE_TYPE_PARM:
- break;
-
- case BIND_EXPR:
- break;
-
- case REAL_TYPE:
- case COMPLEX_TYPE:
- case VOID_TYPE:
- case BOOLEAN_TYPE:
- case TYPENAME_TYPE:
- case UNION_TYPE:
- case ENUMERAL_TYPE:
- case TYPEOF_TYPE:
- break;
-
- case POINTER_TYPE:
- case REFERENCE_TYPE:
- TRY (TREE_TYPE (t));
- break;
+ case METHOD_TYPE:
+ WALK_SUBTREE (TYPE_METHOD_BASETYPE (*tp));
+ /* Fall through. */
case FUNCTION_TYPE:
- case METHOD_TYPE:
- TRY (TREE_TYPE (t));
- TRY (TYPE_ARG_TYPES (t));
+ WALK_SUBTREE (TREE_TYPE (*tp));
+ WALK_SUBTREE (TYPE_ARG_TYPES (*tp));
break;
case ARRAY_TYPE:
- TRY (TREE_TYPE (t));
- TRY (TYPE_DOMAIN (t));
+ WALK_SUBTREE (TREE_TYPE (*tp));
+ WALK_SUBTREE (TYPE_DOMAIN (*tp));
break;
case INTEGER_TYPE:
- TRY (TYPE_MAX_VALUE (t));
+ WALK_SUBTREE (TYPE_MIN_VALUE (*tp));
+ WALK_SUBTREE (TYPE_MAX_VALUE (*tp));
break;
case OFFSET_TYPE:
- TRY (TREE_TYPE (t));
- TRY (TYPE_OFFSET_BASETYPE (t));
+ WALK_SUBTREE (TREE_TYPE (*tp));
+ WALK_SUBTREE (TYPE_OFFSET_BASETYPE (*tp));
break;
case RECORD_TYPE:
- if (TYPE_PTRMEMFUNC_P (t))
- TRY (TYPE_PTRMEMFUNC_FN_TYPE (t));
+ if (TYPE_PTRMEMFUNC_P (*tp))
+ WALK_SUBTREE (TYPE_PTRMEMFUNC_FN_TYPE (*tp));
break;
-
- /* This list is incomplete, but should suffice for now.
- It is very important that `sorry' not call
- `report_error_function'. That could cause an infinite loop. */
- default:
- sorry ("initializer contains unrecognized tree code");
- return error_mark_node;
+ default:
+ my_friendly_abort (19990803);
}
+ /* We didn't find what we were looking for. */
return NULL_TREE;
-#undef TRY
+#undef WALK_SUBTREE
}
-/* Passed to search_tree. Checks for the use of types with no linkage. */
+/* Passed to walk_tree. Checks for the use of types with no linkage. */
static tree
-no_linkage_helper (t)
- tree t;
+no_linkage_helper (tp, walk_subtrees, data)
+ tree *tp;
+ int *walk_subtrees ATTRIBUTE_UNUSED;
+ void *data ATTRIBUTE_UNUSED;
{
+ tree t = *tp;
+
if (TYPE_P (t)
&& (IS_AGGR_TYPE (t) || TREE_CODE (t) == ENUMERAL_TYPE)
&& (decl_function_context (TYPE_MAIN_DECL (t))
no_linkage_check (t)
tree t;
{
- t = search_tree (t, no_linkage_helper);
+ /* There's no point in checking linkage on template functions; we
+ can't know their complete types. */
+ if (processing_template_decl)
+ return NULL_TREE;
+
+ t = walk_tree (&t, no_linkage_helper, NULL);
if (t != error_mark_node)
return t;
return NULL_TREE;
}
-
-/* Subroutine of copy_to_permanent
-
- Assuming T is a node build bottom-up, make it all exist on
- permanent obstack, if it is not permanent already. */
+/* Passed to walk_tree. Copies the node pointed to, if appropriate. */
tree
-mapcar (t, func)
- tree t;
- tree (*func) PROTO((tree));
-{
- tree tmp;
-
- if (t == NULL_TREE)
- return t;
-
- if (tmp = func (t), tmp != NULL_TREE)
- return tmp;
-
- switch (TREE_CODE (t))
+copy_tree_r (tp, walk_subtrees, data)
+ tree *tp;
+ int *walk_subtrees;
+ void *data ATTRIBUTE_UNUSED;
+{
+ enum tree_code code = TREE_CODE (*tp);
+
+ /* We make copies of most nodes. */
+ if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
+ || TREE_CODE_CLASS (code) == 'r'
+ || TREE_CODE_CLASS (code) == 'c'
+ || TREE_CODE_CLASS (code) == 's'
+ || code == PARM_DECL
+ || code == TREE_LIST
+ || code == TREE_VEC
+ || code == OVERLOAD)
{
- case ERROR_MARK:
- return error_mark_node;
-
- case VAR_DECL:
- case FUNCTION_DECL:
- case CONST_DECL:
- /* Rather than aborting, return error_mark_node. This allows us
- to report a sensible error message on code like this:
-
- void g() { int i; f<i>(7); }
-
- In a case like:
-
- void g() { const int i = 7; f<i>(7); }
-
- however, we must actually return the constant initializer. */
- if (TREE_READONLY_DECL_P (t))
- {
- tmp = decl_constant_value (t);
- if (tmp != t)
- return mapcar (tmp, func);
- }
- return error_mark_node;
-
- case PARM_DECL:
- {
- tree chain = TREE_CHAIN (t);
- t = copy_node (t);
- TREE_CHAIN (t) = mapcar (chain, func);
- TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
- DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
- DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
- return t;
- }
-
- case TREE_LIST:
- {
- tree chain = TREE_CHAIN (t);
- t = copy_node (t);
- TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
- TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
- TREE_CHAIN (t) = mapcar (chain, func);
- return t;
- }
-
- case OVERLOAD:
- {
- tree chain = OVL_CHAIN (t);
- t = copy_node (t);
- OVL_FUNCTION (t) = mapcar (OVL_FUNCTION (t), func);
- OVL_CHAIN (t) = mapcar (chain, func);
- return t;
- }
-
- case TREE_VEC:
- {
- int len = TREE_VEC_LENGTH (t);
-
- t = copy_node (t);
- while (len--)
- TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
- return t;
- }
-
- case INTEGER_CST:
- case REAL_CST:
- case STRING_CST:
- return copy_node (t);
-
- case PTRMEM_CST:
- t = copy_node (t);
- TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
- PTRMEM_CST_MEMBER (t) = mapcar (PTRMEM_CST_MEMBER (t), func);
- return t;
-
- case COND_EXPR:
- case TARGET_EXPR:
- case AGGR_INIT_EXPR:
- t = copy_node (t);
- TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
- TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
- TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
- return t;
-
- case SAVE_EXPR:
- t = copy_node (t);
- TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
- return t;
-
- case MODIFY_EXPR:
- case PLUS_EXPR:
- case MINUS_EXPR:
- case MULT_EXPR:
- case TRUNC_DIV_EXPR:
- case TRUNC_MOD_EXPR:
- case MIN_EXPR:
- case MAX_EXPR:
- case LSHIFT_EXPR:
- case RSHIFT_EXPR:
- case BIT_IOR_EXPR:
- case BIT_XOR_EXPR:
- case BIT_AND_EXPR:
- case BIT_ANDTC_EXPR:
- case TRUTH_ANDIF_EXPR:
- case TRUTH_ORIF_EXPR:
- case LT_EXPR:
- case LE_EXPR:
- case GT_EXPR:
- case GE_EXPR:
- case EQ_EXPR:
- case NE_EXPR:
- case CEIL_DIV_EXPR:
- case FLOOR_DIV_EXPR:
- case ROUND_DIV_EXPR:
- case CEIL_MOD_EXPR:
- case FLOOR_MOD_EXPR:
- case ROUND_MOD_EXPR:
- case COMPOUND_EXPR:
- case PREDECREMENT_EXPR:
- case PREINCREMENT_EXPR:
- case POSTDECREMENT_EXPR:
- case POSTINCREMENT_EXPR:
- case ARRAY_REF:
- case SCOPE_REF:
- case TRY_CATCH_EXPR:
- case WITH_CLEANUP_EXPR:
- t = copy_node (t);
- TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
- TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
- return t;
-
- case CALL_EXPR:
- t = copy_node (t);
- TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
- TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
- TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
-
- /* tree.def says that operand two is RTL, but
- make_call_declarator puts trees in there. */
- if (TREE_OPERAND (t, 2)
- && TREE_CODE (TREE_OPERAND (t, 2)) == TREE_LIST)
- TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
- else
- TREE_OPERAND (t, 2) = NULL_TREE;
- return t;
-
- case CONVERT_EXPR:
- case ADDR_EXPR:
- case INDIRECT_REF:
- case NEGATE_EXPR:
- case BIT_NOT_EXPR:
- case TRUTH_NOT_EXPR:
- case NOP_EXPR:
- case COMPONENT_REF:
- case CLEANUP_POINT_EXPR:
- t = copy_node (t);
- TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
- TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
- return t;
-
- case POINTER_TYPE:
- tmp = build_pointer_type (mapcar (TREE_TYPE (t), func));
- return cp_build_qualified_type (tmp, TYPE_QUALS (t));
- case REFERENCE_TYPE:
- tmp = build_reference_type (mapcar (TREE_TYPE (t), func));
- return cp_build_qualified_type (tmp, TYPE_QUALS (t));
- case FUNCTION_TYPE:
- tmp = build_function_type (mapcar (TREE_TYPE (t), func),
- mapcar (TYPE_ARG_TYPES (t), func));
- return cp_build_qualified_type (tmp, TYPE_QUALS (t));
- case ARRAY_TYPE:
- tmp = build_cplus_array_type (mapcar (TREE_TYPE (t), func),
- mapcar (TYPE_DOMAIN (t), func));
- return cp_build_qualified_type (tmp, CP_TYPE_QUALS (t));
- case INTEGER_TYPE:
- tmp = build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
- return cp_build_qualified_type (tmp, TYPE_QUALS (t));
- case OFFSET_TYPE:
- tmp = build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
- mapcar (TREE_TYPE (t), func));
- return cp_build_qualified_type (tmp, TYPE_QUALS (t));
- case METHOD_TYPE:
- tmp = build_cplus_method_type
- (mapcar (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (t))), func),
- mapcar (TREE_TYPE (t), func),
- mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func));
- return cp_build_qualified_type (tmp, TYPE_QUALS (t));
-
- case COMPLEX_CST:
- t = copy_node (t);
- TREE_REALPART (t) = mapcar (TREE_REALPART (t), func);
- TREE_IMAGPART (t) = mapcar (TREE_REALPART (t), func);
- return t;
-
- case CONSTRUCTOR:
- t = copy_node (t);
- CONSTRUCTOR_ELTS (t) = mapcar (CONSTRUCTOR_ELTS (t), func);
- return t;
-
- case TEMPLATE_TEMPLATE_PARM:
- return copy_template_template_parm (t);
-
- case BIND_EXPR:
- t = copy_node (t);
- TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
- TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
- TREE_OPERAND (t, 2) = NULL_TREE;
- return t;
-
- case NEW_EXPR:
- t = copy_node (t);
- TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
- TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
- TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
- return t;
-
- case LOOKUP_EXPR:
- t = copy_node (t);
- TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
- return t;
-
- case RECORD_TYPE:
- if (TYPE_PTRMEMFUNC_P (t))
- return build_ptrmemfunc_type
- (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
- /* else fall through */
-
- /* This list is incomplete, but should suffice for now.
- It is very important that `sorry' not call
- `report_error_function'. That could cause an infinite loop. */
- default:
- sorry ("initializer contains unrecognized tree code");
- return error_mark_node;
-
+ /* Because the chain gets clobbered when we make a copy, we save it
+ here. */
+ tree chain = TREE_CHAIN (*tp);
+
+ /* Copy the node. */
+ *tp = copy_node (*tp);
+
+ /* Now, restore the chain, if appropriate. That will cause
+ walk_tree to walk into the chain as well. */
+ if (code == PARM_DECL || code == TREE_LIST || code == OVERLOAD
+ || statement_code_p (code))
+ TREE_CHAIN (*tp) = chain;
+
+ /* For now, we don't update BLOCKs when we make copies. So, we
+ have to nullify all scope-statements. */
+ if (TREE_CODE (*tp) == SCOPE_STMT)
+ SCOPE_STMT_BLOCK (*tp) = NULL_TREE;
}
- my_friendly_abort (107);
- /* NOTREACHED */
- return NULL_TREE;
-}
-
-static tree
-perm_manip (t)
- tree t;
-{
- if (TREE_PERMANENT (t))
- return t;
+ else if (code == TEMPLATE_TEMPLATE_PARM)
+ /* These must be copied specially. */
+ *tp = copy_template_template_parm (*tp);
+ else if (TREE_CODE_CLASS (code) == 't')
+ /* There's no need to copy types, or anything beneath them. */
+ *walk_subtrees = 0;
- /* Support `void f () { extern int i; A<&i> a; }' */
- if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
- && TREE_PUBLIC (t))
- {
- t = copy_node (t);
-
- /* copy_rtx won't make a new SYMBOL_REF, so call make_decl_rtl again. */
- DECL_RTL (t) = 0;
- make_decl_rtl (t, NULL_PTR, 1);
-
- return t;
- }
return NULL_TREE;
}
-/* Assuming T is a node built bottom-up, make it all exist on
- permanent obstack, if it is not permanent already. */
-
-tree
-copy_to_permanent (t)
- tree t;
-{
- if (t == NULL_TREE || TREE_PERMANENT (t))
- return t;
-
- push_obstacks_nochange ();
- end_temporary_allocation ();
-
- t = mapcar (t, perm_manip);
-
- pop_obstacks ();
-
- return t;
-}
-
#ifdef GATHER_STATISTICS
extern int depth_reached;
#endif
void
print_lang_statistics ()
{
- extern struct obstack decl_obstack;
- print_obstack_statistics ("class_obstack", &class_obstack);
- print_obstack_statistics ("decl_obstack", &decl_obstack);
print_search_statistics ();
print_class_statistics ();
#ifdef GATHER_STATISTICS
return sz;
}
-static
-tree
-bot_manip (t)
- tree t;
+/* Called from break_out_target_exprs via mapcar. */
+
+static tree
+bot_manip (tp, walk_subtrees, data)
+ tree *tp;
+ int *walk_subtrees;
+ void *data;
{
+ splay_tree target_remap = ((splay_tree) data);
+ tree t = *tp;
+
if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
- return t;
+ {
+ /* There can't be any TARGET_EXPRs below this point. */
+ *walk_subtrees = 0;
+ return NULL_TREE;
+ }
else if (TREE_CODE (t) == TARGET_EXPR)
{
+ tree u;
+
if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
{
mark_used (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 1), 0), 0));
- return build_cplus_new
+ u = build_cplus_new
(TREE_TYPE (t), break_out_target_exprs (TREE_OPERAND (t, 1)));
}
- t = copy_node (t);
- TREE_OPERAND (t, 0) = build (VAR_DECL, TREE_TYPE (t));
- layout_decl (TREE_OPERAND (t, 0), 0);
- return t;
+ else
+ {
+ u = copy_node (t);
+ TREE_OPERAND (u, 0) = build (VAR_DECL, TREE_TYPE (t));
+ layout_decl (TREE_OPERAND (u, 0), 0);
+ }
+
+ /* Map the old variable to the new one. */
+ splay_tree_insert (target_remap,
+ (splay_tree_key) TREE_OPERAND (t, 0),
+ (splay_tree_value) TREE_OPERAND (u, 0));
+
+ /* Replace the old expression with the new version. */
+ *tp = u;
+ /* We don't have to go below this point; the recursive call to
+ break_out_target_exprs will have handled anything below this
+ point. */
+ *walk_subtrees = 0;
+ return NULL_TREE;
}
else if (TREE_CODE (t) == CALL_EXPR)
mark_used (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
- return NULL_TREE;
+ /* Make a copy of this node. */
+ return copy_tree_r (tp, walk_subtrees, NULL);
}
-/* Actually, we'll just clean out the target exprs for the moment. */
+/* Replace all remapped VAR_DECLs in T with their new equivalents.
+ DATA is really a splay-tree mapping old variables to new
+ variables. */
+
+static tree
+bot_replace (t, walk_subtrees, data)
+ tree *t;
+ int *walk_subtrees ATTRIBUTE_UNUSED;
+ void *data;
+{
+ splay_tree target_remap = ((splay_tree) data);
+
+ if (TREE_CODE (*t) == VAR_DECL)
+ {
+ splay_tree_node n = splay_tree_lookup (target_remap,
+ (splay_tree_key) *t);
+ if (n)
+ *t = (tree) n->value;
+ }
+
+ return NULL_TREE;
+}
+
+/* When we parse a default argument expression, we may create
+ temporary variables via TARGET_EXPRs. When we actually use the
+ default-argument expression, we make a copy of the expression, but
+ we must replace the temporaries with appropriate local versions. */
tree
break_out_target_exprs (t)
tree t;
{
- return mapcar (t, bot_manip);
+ static int target_remap_count;
+ static splay_tree target_remap;
+
+ if (!target_remap_count++)
+ target_remap = splay_tree_new (splay_tree_compare_pointers,
+ /*splay_tree_delete_key_fn=*/NULL,
+ /*splay_tree_delete_value_fn=*/NULL);
+ walk_tree (&t, bot_manip, target_remap);
+ walk_tree (&t, bot_replace, target_remap);
+
+ if (!--target_remap_count)
+ {
+ splay_tree_delete (target_remap);
+ target_remap = NULL;
+ }
+
+ return t;
}
/* Obstack used for allocating nodes in template function and variable
definitions. */
-/* Similar to `build_nt', except we build
- on the permanent_obstack, regardless. */
+/* Similar to `build_nt', except that we set TREE_COMPLEXITY to be the
+ current line number. */
tree
build_min_nt VPROTO((enum tree_code code, ...))
{
-#ifndef __STDC__
+#ifndef ANSI_PROTOTYPES
enum tree_code code;
#endif
- register struct obstack *ambient_obstack = expression_obstack;
va_list p;
register tree t;
register int length;
VA_START (p, code);
-#ifndef __STDC__
+#ifndef ANSI_PROTOTYPES
code = va_arg (p, enum tree_code);
#endif
- expression_obstack = &permanent_obstack;
-
t = make_node (code);
length = tree_code_length[(int) code];
TREE_COMPLEXITY (t) = lineno;
for (i = 0; i < length; i++)
{
tree x = va_arg (p, tree);
- TREE_OPERAND (t, i) = copy_to_permanent (x);
+ TREE_OPERAND (t, i) = x;
}
va_end (p);
- expression_obstack = ambient_obstack;
return t;
}
-/* Similar to `build', except we build
- on the permanent_obstack, regardless. */
+/* Similar to `build', except we set TREE_COMPLEXITY to the current
+ line-number. */
tree
build_min VPROTO((enum tree_code code, tree tt, ...))
{
-#ifndef __STDC__
+#ifndef ANSI_PROTOTYPES
enum tree_code code;
tree tt;
#endif
- register struct obstack *ambient_obstack = expression_obstack;
va_list p;
register tree t;
register int length;
VA_START (p, tt);
-#ifndef __STDC__
+#ifndef ANSI_PROTOTYPES
code = va_arg (p, enum tree_code);
tt = va_arg (p, tree);
#endif
- expression_obstack = &permanent_obstack;
-
t = make_node (code);
length = tree_code_length[(int) code];
- TREE_TYPE (t) = copy_to_permanent (tt);
+ TREE_TYPE (t) = tt;
TREE_COMPLEXITY (t) = lineno;
for (i = 0; i < length; i++)
{
tree x = va_arg (p, tree);
- TREE_OPERAND (t, i) = copy_to_permanent (x);
+ TREE_OPERAND (t, i) = x;
}
va_end (p);
- expression_obstack = ambient_obstack;
return t;
}
-/* Same as `tree_cons' but make a permanent object. */
-
-tree
-min_tree_cons (purpose, value, chain)
- tree purpose, value, chain;
-{
- register tree node;
- register struct obstack *ambient_obstack = current_obstack;
- current_obstack = &permanent_obstack;
-
- node = tree_cons (copy_to_permanent (purpose),
- copy_to_permanent (value), chain);
- current_obstack = ambient_obstack;
- return node;
-}
-
tree
get_type_decl (t)
tree t;
return -1;
}
-/* Similar to make_tree_vec, but build on the momentary_obstack.
- Thus, these vectors are really and truly temporary. */
-
-tree
-make_temp_vec (len)
- int len;
-{
- register tree node;
- push_expression_obstack ();
- node = make_tree_vec (len);
- pop_obstacks ();
- return node;
-}
-
/* Build a wrapper around some pointer PTR so we can use it as a tree. */
tree
build_expr_ptr_wrapper (ptr)
void *ptr;
{
- tree t;
- push_expression_obstack ();
- t = build_ptr_wrapper (ptr);
- pop_obstacks ();
- return t;
+ return build_ptr_wrapper (ptr);
}
/* Build a wrapper around some integer I so we can use it as a tree. */
{
tree t;
- /* Make sure that we put these on the permanent obstack; up in
- add_pending_template, we pass this return value into perm_tree_cons,
- which also puts it on the permanent_obstack. However, this wasn't
- explicitly doing the same. */
- register struct obstack *ambient_obstack = current_obstack;
- current_obstack = &permanent_obstack;
-
t = make_node (SRCLOC);
SRCLOC_FILE (t) = file;
SRCLOC_LINE (t) = line;
- current_obstack = ambient_obstack;
-
return t;
}
return build_srcloc (input_filename, lineno);
}
-void
-push_expression_obstack ()
-{
- push_obstacks_nochange ();
- current_obstack = expression_obstack;
-}
-
/* The type of ARG when used as an lvalue. */
tree
pod_type_p (t)
tree t;
{
- tree f;
-
while (TREE_CODE (t) == ARRAY_TYPE)
t = TREE_TYPE (t);
- if (! IS_AGGR_TYPE (t))
+ if (INTEGRAL_TYPE_P (t))
+ return 1; /* integral, character or enumeral type */
+ if (FLOAT_TYPE_P (t))
return 1;
-
- if (CLASSTYPE_NON_AGGREGATE (t)
- || TYPE_HAS_COMPLEX_ASSIGN_REF (t)
- || TYPE_HAS_DESTRUCTOR (t))
+ if (TYPE_PTR_P (t))
+ return 1; /* pointer to non-member */
+ if (TYPE_PTRMEM_P (t))
+ return 1; /* pointer to member object */
+ if (TYPE_PTRMEMFUNC_P (t))
+ return 1; /* pointer to member function */
+
+ if (! CLASS_TYPE_P (t))
+ return 0; /* other non-class type (reference or function) */
+ if (CLASSTYPE_NON_POD_P (t))
return 0;
-
- for (f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
- {
- if (TREE_CODE (f) != FIELD_DECL)
- continue;
-
- if (TREE_CODE (TREE_TYPE (f)) == REFERENCE_TYPE
- || TYPE_PTRMEMFUNC_P (TREE_TYPE (f))
- || TYPE_PTRMEM_P (TREE_TYPE (f)))
- return 0;
- }
-
return 1;
}
return 0;
}
+
+/* Return a new PTRMEM_CST of the indicated TYPE. The MEMBER is the
+ thing pointed to by the constant. */
+
+tree
+make_ptrmem_cst (type, member)
+ tree type;
+ tree member;
+{
+ tree ptrmem_cst = make_node (PTRMEM_CST);
+ /* It would seem a great convenience if make_node would set
+ TREE_CONSTANT for things of class `c', but it does not. */
+ TREE_CONSTANT (ptrmem_cst) = 1;
+ TREE_TYPE (ptrmem_cst) = type;
+ PTRMEM_CST_MEMBER (ptrmem_cst) = member;
+ return ptrmem_cst;
+}
+
+/* Mark ARG (which is really a `struct list_hash **') for GC. */
+
+static void
+mark_list_hash (arg)
+ void *arg;
+{
+ struct list_hash *lh;
+
+ for (lh = * ((struct list_hash **) arg); lh; lh = lh->next)
+ ggc_mark_tree (lh->list);
+}
+
+/* Initialize tree.c. */
+
+void
+init_tree ()
+{
+ make_lang_type_fn = cp_make_lang_type;
+ lang_unsave = cp_unsave;
+ ggc_add_root (list_hash_table,
+ sizeof (list_hash_table) / sizeof (struct list_hash *),
+ sizeof (struct list_hash *),
+ mark_list_hash);
+}
+
+/* The SAVE_EXPR pointed to by TP is being copied. If ST contains
+ information indicating to what new SAVE_EXPR this one should be
+ mapped, use that one. Otherwise, create a new node and enter it in
+ ST. FN is the function into which the copy will be placed. */
+
+void
+remap_save_expr (tp, st, fn, walk_subtrees)
+ tree *tp;
+ splay_tree st;
+ tree fn;
+ int *walk_subtrees;
+{
+ splay_tree_node n;
+
+ /* See if we already encountered this SAVE_EXPR. */
+ n = splay_tree_lookup (st, (splay_tree_key) *tp);
+
+ /* If we didn't already remap this SAVE_EXPR, do so now. */
+ if (!n)
+ {
+ tree t = copy_node (*tp);
+
+ /* The SAVE_EXPR is now part of the function into which we
+ are inlining this body. */
+ SAVE_EXPR_CONTEXT (t) = fn;
+ /* And we haven't evaluated it yet. */
+ SAVE_EXPR_RTL (t) = NULL_RTX;
+ /* Remember this SAVE_EXPR. */
+ n = splay_tree_insert (st,
+ (splay_tree_key) *tp,
+ (splay_tree_value) t);
+ }
+ else
+ /* We've already walked into this SAVE_EXPR, so we needn't do it
+ again. */
+ *walk_subtrees = 0;
+
+ /* Replace this SAVE_EXPR with the copy. */
+ *tp = (tree) n->value;
+}
+
+/* Called via walk_tree. If *TP points to a DECL_STMT for a local
+ declaration, copies the declaration and enters it in the splay_tree
+ given by DATA (which is really a `splay_tree'). */
+
+static tree
+mark_local_for_remap_r (tp, walk_subtrees, data)
+ tree *tp;
+ int *walk_subtrees ATTRIBUTE_UNUSED;
+ void *data;
+{
+ tree t = *tp;
+ splay_tree st = (splay_tree) data;
+
+ if ((TREE_CODE (t) == DECL_STMT
+ && nonstatic_local_decl_p (DECL_STMT_DECL (t)))
+ || TREE_CODE (t) == LABEL_STMT)
+ {
+ tree decl;
+ tree copy;
+
+ /* Figure out what's being declared. */
+ decl = (TREE_CODE (t) == DECL_STMT
+ ? DECL_STMT_DECL (t) : LABEL_STMT_LABEL (t));
+
+ /* Make a copy. */
+ copy = copy_decl_for_inlining (decl,
+ DECL_CONTEXT (decl),
+ DECL_CONTEXT (decl));
+
+ /* Remember the copy. */
+ splay_tree_insert (st,
+ (splay_tree_key) decl,
+ (splay_tree_value) copy);
+ }
+
+ return NULL_TREE;
+}
+
+/* Called via walk_tree when an expression is unsaved. Using the
+ splay_tree given by DATA (which is really a `splay_tree'),
+ remaps all local declarations to appropriate replacements. */
+
+static tree
+cp_unsave_r (tp, walk_subtrees, data)
+ tree *tp;
+ int *walk_subtrees;
+ void *data;
+{
+ splay_tree st = (splay_tree) data;
+ splay_tree_node n;
+
+ /* Only a local declaration (variable or label). */
+ if (nonstatic_local_decl_p (*tp))
+ {
+ /* Lookup the declaration. */
+ n = splay_tree_lookup (st, (splay_tree_key) *tp);
+
+ /* If it's there, remap it. */
+ if (n)
+ *tp = (tree) n->value;
+ }
+ else if (TREE_CODE (*tp) == SAVE_EXPR)
+ remap_save_expr (tp, st, current_function_decl, walk_subtrees);
+ else
+ {
+ copy_tree_r (tp, walk_subtrees, NULL);
+
+ /* Do whatever unsaving is required. */
+ unsave_expr_1 (*tp);
+ }
+
+ /* Keep iterating. */
+ return NULL_TREE;
+}
+
+/* Called by unsave_expr_now whenever an expression (*TP) needs to be
+ unsaved. */
+
+static void
+cp_unsave (tp)
+ tree *tp;
+{
+ splay_tree st;
+
+ /* Create a splay-tree to map old local variable declarations to new
+ ones. */
+ st = splay_tree_new (splay_tree_compare_pointers, NULL, NULL);
+
+ /* Walk the tree once figuring out what needs to be remapped. */
+ walk_tree (tp, mark_local_for_remap_r, st);
+
+ /* Walk the tree again, copying, remapping, and unsaving. */
+ walk_tree (tp, cp_unsave_r, st);
+
+ /* Clean up. */
+ splay_tree_delete (st);
+}