1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
31 extern void compiler_error ();
33 static tree get_identifier_list PROTO((tree));
34 static tree bot_manip PROTO((tree));
35 static tree perm_manip PROTO((tree));
36 static tree build_cplus_array_type_1 PROTO((tree, tree));
37 static void list_hash_add PROTO((int, tree));
38 static int list_hash PROTO((tree, tree, tree));
39 static tree list_hash_lookup PROTO((int, int, int, int, tree, tree,
41 static void propagate_binfo_offsets PROTO((tree, tree));
42 static int avoid_overlap PROTO((tree, tree));
43 static int lvalue_p_1 PROTO((tree, int));
45 #define CEIL(x,y) (((x) + (y) - 1) / (y))
47 /* Returns non-zero if REF is an lvalue. If
48 TREAT_CLASS_RVALUES_AS_LVALUES is non-zero, rvalues of class type
49 are considered lvalues. */
52 lvalue_p_1 (ref, treat_class_rvalues_as_lvalues)
54 int treat_class_rvalues_as_lvalues;
56 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
59 if (ref == current_class_ptr && flag_this_is_variable <= 0)
62 switch (TREE_CODE (ref))
64 /* preincrements and predecrements are valid lvals, provided
65 what they refer to are valid lvals. */
66 case PREINCREMENT_EXPR:
67 case PREDECREMENT_EXPR:
72 case WITH_CLEANUP_EXPR:
75 return lvalue_p_1 (TREE_OPERAND (ref, 0),
76 treat_class_rvalues_as_lvalues);
82 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
83 && DECL_LANG_SPECIFIC (ref)
84 && DECL_IN_AGGR_P (ref))
90 if (TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
94 /* A currently unresolved scope ref. */
96 my_friendly_abort (103);
98 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
100 return (lvalue_p_1 (TREE_OPERAND (ref, 0),
101 treat_class_rvalues_as_lvalues)
102 && lvalue_p_1 (TREE_OPERAND (ref, 1),
103 treat_class_rvalues_as_lvalues));
107 return (lvalue_p_1 (TREE_OPERAND (ref, 1),
108 treat_class_rvalues_as_lvalues)
109 && lvalue_p_1 (TREE_OPERAND (ref, 2),
110 treat_class_rvalues_as_lvalues));
116 return lvalue_p_1 (TREE_OPERAND (ref, 1),
117 treat_class_rvalues_as_lvalues);
121 return (lvalue_p_1 (TREE_OPERAND (ref, 0),
122 treat_class_rvalues_as_lvalues)
123 && lvalue_p_1 (TREE_OPERAND (ref, 1),
124 treat_class_rvalues_as_lvalues));
127 return treat_class_rvalues_as_lvalues;
130 return (treat_class_rvalues_as_lvalues
131 && IS_AGGR_TYPE (TREE_TYPE (ref)));
134 /* All functions (except non-static-member functions) are
136 return !DECL_NONSTATIC_MEMBER_FUNCTION_P (ref);
145 /* Return nonzero if REF is an lvalue valid for this language.
146 Lvalues can be assigned, unless they have TREE_READONLY, or unless
147 they are FUNCTION_DECLs. Lvalues can have their address taken,
148 unless they have DECL_REGISTER. */
154 return lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/0);
157 /* This differs from real_lvalue_p in that class rvalues are considered
164 return lvalue_p_1 (ref, /*treat_class_rvalues_as_lvalues=*/1);
167 /* Return nonzero if REF is an lvalue valid for this language;
168 otherwise, print an error message and return zero. */
171 lvalue_or_else (ref, string)
175 int win = lvalue_p (ref);
177 error ("non-lvalue in %s", string);
181 /* INIT is a CALL_EXPR which needs info about its target.
182 TYPE is the type that this initialization should appear to have.
184 Build an encapsulation of the initialization to perform
185 and return it so that it can be processed by language-independent
186 and language-specific expression expanders. */
189 build_cplus_new (type, init)
196 if (TREE_CODE (init) != CALL_EXPR && TREE_CODE (init) != AGGR_INIT_EXPR)
199 slot = build (VAR_DECL, type);
200 DECL_ARTIFICIAL (slot) = 1;
201 layout_decl (slot, 0);
202 rval = build (AGGR_INIT_EXPR, type,
203 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
204 TREE_SIDE_EFFECTS (rval) = 1;
205 rval = build (TARGET_EXPR, type, slot, rval, NULL_TREE, NULL_TREE);
206 TREE_SIDE_EFFECTS (rval) = 1;
211 /* Encapsulate the expression INIT in a TARGET_EXPR. */
214 get_target_expr (init)
220 slot = build (VAR_DECL, TREE_TYPE (init));
221 DECL_ARTIFICIAL (slot) = 1;
222 layout_decl (slot, 0);
223 rval = build (TARGET_EXPR, TREE_TYPE (init), slot, init,
224 NULL_TREE, NULL_TREE);
225 TREE_SIDE_EFFECTS (rval) = 1;
230 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
231 these CALL_EXPRs with tree nodes that will perform the cleanups. */
234 break_out_cleanups (exp)
239 if (TREE_CODE (tmp) == CALL_EXPR
240 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
241 return build_cplus_new (TREE_TYPE (tmp), tmp);
243 while (TREE_CODE (tmp) == NOP_EXPR
244 || TREE_CODE (tmp) == CONVERT_EXPR
245 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
247 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
248 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
250 TREE_OPERAND (tmp, 0)
251 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
252 TREE_OPERAND (tmp, 0));
256 tmp = TREE_OPERAND (tmp, 0);
261 /* Recursively perform a preorder search of EXP for CALL_EXPRs, making
262 copies where they are found. Returns a deep copy of all nodes transitively
263 containing CALL_EXPRs. */
266 break_out_calls (exp)
269 register tree t1, t2 = NULL_TREE;
270 register enum tree_code code;
271 register int changed = 0;
274 if (exp == NULL_TREE)
277 code = TREE_CODE (exp);
279 if (code == CALL_EXPR)
280 return copy_node (exp);
282 /* Don't try to defeat a save_expr, as it should only be done once. */
283 if (code == SAVE_EXPR)
286 switch (TREE_CODE_CLASS (code))
291 case 'c': /* a constant */
292 case 't': /* a type node */
293 case 'x': /* something random, like an identifier or an ERROR_MARK. */
296 case 'd': /* A decl node */
297 #if 0 /* This is bogus. jason 9/21/94 */
299 t1 = break_out_calls (DECL_INITIAL (exp));
300 if (t1 != DECL_INITIAL (exp))
302 exp = copy_node (exp);
303 DECL_INITIAL (exp) = t1;
308 case 'b': /* A block node */
310 /* Don't know how to handle these correctly yet. Must do a
311 break_out_calls on all DECL_INITIAL values for local variables,
312 and also break_out_calls on all sub-blocks and sub-statements. */
317 case 'e': /* an expression */
318 case 'r': /* a reference */
319 case 's': /* an expression with side effects */
320 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
322 t1 = break_out_calls (TREE_OPERAND (exp, i));
323 if (t1 != TREE_OPERAND (exp, i))
325 exp = copy_node (exp);
326 TREE_OPERAND (exp, i) = t1;
331 case '<': /* a comparison expression */
332 case '2': /* a binary arithmetic expression */
333 t2 = break_out_calls (TREE_OPERAND (exp, 1));
334 if (t2 != TREE_OPERAND (exp, 1))
336 case '1': /* a unary arithmetic expression */
337 t1 = break_out_calls (TREE_OPERAND (exp, 0));
338 if (t1 != TREE_OPERAND (exp, 0))
342 if (tree_code_length[(int) code] == 1)
343 return build1 (code, TREE_TYPE (exp), t1);
345 return build (code, TREE_TYPE (exp), t1, t2);
352 extern struct obstack *current_obstack;
353 extern struct obstack permanent_obstack, class_obstack;
354 extern struct obstack *saveable_obstack;
355 extern struct obstack *expression_obstack;
357 /* Here is how primitive or already-canonicalized types' hash
358 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
359 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
361 /* Construct, lay out and return the type of methods belonging to class
362 BASETYPE and whose arguments are described by ARGTYPES and whose values
363 are described by RETTYPE. If each type exists already, reuse it. */
366 build_cplus_method_type (basetype, rettype, argtypes)
367 tree basetype, rettype, argtypes;
373 /* Make a node of the sort we want. */
374 t = make_node (METHOD_TYPE);
376 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
377 TREE_TYPE (t) = rettype;
378 if (IS_SIGNATURE (basetype))
379 ptype = build_signature_pointer_type (basetype);
381 ptype = build_pointer_type (basetype);
383 /* The actual arglist for this function includes a "hidden" argument
384 which is "this". Put it into the list of argument types. */
386 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
387 TYPE_ARG_TYPES (t) = argtypes;
388 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
390 /* If we already have such a type, use the old one and free this one.
391 Note that it also frees up the above cons cell if found. */
392 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
393 t = type_hash_canon (hashcode, t);
395 if (TYPE_SIZE (t) == 0)
402 build_cplus_array_type_1 (elt_type, index_type)
406 register struct obstack *ambient_obstack = current_obstack;
407 register struct obstack *ambient_saveable_obstack = saveable_obstack;
410 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
411 make this permanent too. */
412 if (TREE_PERMANENT (elt_type)
413 && (index_type == 0 || TREE_PERMANENT (index_type)))
415 current_obstack = &permanent_obstack;
416 saveable_obstack = &permanent_obstack;
419 if (processing_template_decl
420 || uses_template_parms (index_type))
422 t = make_node (ARRAY_TYPE);
423 TREE_TYPE (t) = elt_type;
424 TYPE_DOMAIN (t) = index_type;
427 t = build_array_type (elt_type, index_type);
429 /* Push these needs up so that initialization takes place
431 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
432 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
433 current_obstack = ambient_obstack;
434 saveable_obstack = ambient_saveable_obstack;
439 build_cplus_array_type (elt_type, index_type)
444 int type_quals = CP_TYPE_QUALS (elt_type);
446 elt_type = TYPE_MAIN_VARIANT (elt_type);
448 t = build_cplus_array_type_1 (elt_type, index_type);
450 if (type_quals != TYPE_UNQUALIFIED)
451 t = cp_build_qualified_type (t, type_quals);
456 /* Make a variant type in the proper way for C/C++, propagating qualifiers
457 down to the element type of an array. */
460 cp_build_qualified_type (type, type_quals)
464 if (type == error_mark_node)
467 /* A restrict-qualified pointer type must be a pointer (or reference)
468 to object or incomplete type. */
469 if ((type_quals & TYPE_QUAL_RESTRICT)
470 && (!POINTER_TYPE_P (type)
471 || TYPE_PTRMEM_P (type)
472 || TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE))
474 cp_error ("`%T' cannot be `restrict'-qualified", type);
475 type_quals &= ~TYPE_QUAL_RESTRICT;
478 if (TREE_CODE (type) == ARRAY_TYPE)
480 tree real_main_variant = TYPE_MAIN_VARIANT (type);
482 push_obstacks (TYPE_OBSTACK (real_main_variant),
483 TYPE_OBSTACK (real_main_variant));
484 type = build_cplus_array_type_1 (cp_build_qualified_type
485 (TREE_TYPE (type), type_quals),
488 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
489 make a copy. (TYPE might have come from the hash table and
490 REAL_MAIN_VARIANT might be in some function's obstack.) */
492 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
494 type = copy_node (type);
495 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
498 TYPE_MAIN_VARIANT (type) = real_main_variant;
502 return build_qualified_type (type, type_quals);
505 /* Returns the canonical version of TYPE. In other words, if TYPE is
506 a typedef, returns the underlying type. The cv-qualification of
507 the type returned matches the type input; they will always be
511 canonical_type_variant (t)
514 return cp_build_qualified_type (TYPE_MAIN_VARIANT (t), CP_TYPE_QUALS (t));
517 /* Add OFFSET to all base types of T.
519 OFFSET, which is a type offset, is the number of bytes.
521 Note that we don't have to worry about having two paths to the
522 same base type, since this type owns its association list. */
525 propagate_binfo_offsets (binfo, offset)
529 tree binfos = BINFO_BASETYPES (binfo);
530 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
532 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
534 tree base_binfo = TREE_VEC_ELT (binfos, i);
536 if (TREE_VIA_VIRTUAL (base_binfo))
541 tree delta = NULL_TREE;
543 for (j = i+1; j < n_baselinks; j++)
544 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
546 /* The next basetype offset must take into account the space
547 between the classes, not just the size of each class. */
548 delta = size_binop (MINUS_EXPR,
549 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
550 BINFO_OFFSET (base_binfo));
555 if (BINFO_OFFSET_ZEROP (base_binfo))
556 BINFO_OFFSET (base_binfo) = offset;
558 BINFO_OFFSET (base_binfo)
559 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
561 BINFO_OFFSET (base_binfo) = offset;
564 propagate_binfo_offsets (base_binfo, offset);
566 /* Go to our next class that counts for offset propagation. */
569 offset = size_binop (PLUS_EXPR, offset, delta);
574 /* Makes new binfos for the indirect bases under BINFO, and updates
575 BINFO_OFFSET for them and their bases. */
578 unshare_base_binfos (binfo)
581 tree binfos = BINFO_BASETYPES (binfo);
585 if (binfos == NULL_TREE)
588 /* Now unshare the structure beneath BINFO. */
589 for (j = TREE_VEC_LENGTH (binfos)-1;
592 tree base_binfo = TREE_VEC_ELT (binfos, j);
593 new_binfo = TREE_VEC_ELT (binfos, j)
594 = make_binfo (BINFO_OFFSET (base_binfo),
596 BINFO_VTABLE (base_binfo),
597 BINFO_VIRTUALS (base_binfo));
598 TREE_VIA_PUBLIC (new_binfo) = TREE_VIA_PUBLIC (base_binfo);
599 TREE_VIA_PROTECTED (new_binfo) = TREE_VIA_PROTECTED (base_binfo);
600 TREE_VIA_VIRTUAL (new_binfo) = TREE_VIA_VIRTUAL (base_binfo);
601 BINFO_INHERITANCE_CHAIN (new_binfo) = binfo;
602 unshare_base_binfos (new_binfo);
606 /* Finish the work of layout_record, now taking virtual bases into account.
607 Also compute the actual offsets that our base classes will have.
608 This must be performed after the fields are laid out, since virtual
609 baseclasses must be laid down at the end of the record.
611 Returns the maximum number of virtual functions any of the
612 baseclasses provide. */
615 layout_basetypes (rec, max)
619 tree binfos = TYPE_BINFO_BASETYPES (rec);
620 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
624 unsigned int record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
625 unsigned int desired_align;
627 /* Record size so far is CONST_SIZE bits, where CONST_SIZE is an integer. */
628 register unsigned int const_size = 0;
629 unsigned int nonvirtual_const_size;
631 #ifdef STRUCTURE_SIZE_BOUNDARY
632 /* Packed structures don't need to have minimum size. */
633 if (! TYPE_PACKED (rec))
634 record_align = MAX (record_align, STRUCTURE_SIZE_BOUNDARY);
637 /* Get all the virtual base types that this type uses. The
638 TREE_VALUE slot holds the virtual baseclass type. Note that
639 get_vbase_types makes copies of the virtual base BINFOs, so that
640 the vbase_types are unshared. */
641 CLASSTYPE_VBASECLASSES (rec) = vbase_types = get_vbase_types (rec);
643 my_friendly_assert (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST, 19970302);
644 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
646 nonvirtual_const_size = const_size;
650 tree basetype = BINFO_TYPE (vbase_types);
653 desired_align = TYPE_ALIGN (basetype);
654 record_align = MAX (record_align, desired_align);
657 offset = integer_zero_node;
660 /* Give each virtual base type the alignment it wants. */
661 const_size = CEIL (const_size, desired_align) * desired_align;
662 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
665 if (CLASSTYPE_VSIZE (basetype) > max)
666 max = CLASSTYPE_VSIZE (basetype);
667 BINFO_OFFSET (vbase_types) = offset;
669 /* Every virtual baseclass takes at least a UNIT, so that we can
670 take its address and get something different for each base. */
671 const_size += MAX (BITS_PER_UNIT,
672 TREE_INT_CST_LOW (CLASSTYPE_SIZE (basetype)));
674 vbase_types = TREE_CHAIN (vbase_types);
679 /* Because a virtual base might take a single byte above,
680 we have to re-adjust the total size to make sure it is
681 a multiple of the alignment. */
682 /* Give the whole object the alignment it wants. */
683 const_size = CEIL (const_size, record_align) * record_align;
686 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
687 here, as that is for this class, without any virtual base classes. */
688 TYPE_ALIGN (rec) = record_align;
689 if (const_size != nonvirtual_const_size)
691 TYPE_SIZE (rec) = size_int (const_size);
692 TYPE_SIZE_UNIT (rec) = size_binop (FLOOR_DIV_EXPR, TYPE_SIZE (rec),
693 size_int (BITS_PER_UNIT));
696 /* Now propagate offset information throughout the lattice. */
697 for (i = 0; i < n_baseclasses; i++)
699 register tree base_binfo = TREE_VEC_ELT (binfos, i);
700 register tree basetype = BINFO_TYPE (base_binfo);
701 tree field = TYPE_FIELDS (rec);
703 if (TREE_VIA_VIRTUAL (base_binfo))
706 my_friendly_assert (TREE_TYPE (field) == basetype, 23897);
708 if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
709 cp_warning ("direct base `%T' inaccessible in `%T' due to ambiguity",
712 BINFO_OFFSET (base_binfo)
713 = size_int (CEIL (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field)),
715 propagate_binfo_offsets (base_binfo, BINFO_OFFSET (base_binfo));
716 TYPE_FIELDS (rec) = TREE_CHAIN (field);
719 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
720 vbase_types = TREE_CHAIN (vbase_types))
722 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
723 unshare_base_binfos (vbase_types);
724 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
728 tree basetype = BINFO_TYPE (vbase_types);
729 if (get_base_distance (basetype, rec, 0, (tree*)0) == -2)
730 cp_warning ("virtual base `%T' inaccessible in `%T' due to ambiguity",
738 /* If the empty base field in DECL overlaps with a base of the same type in
739 NEWDECL, which is either another base field or the first data field of
740 the class, pad the base just before NEWDECL and return 1. Otherwise,
744 avoid_overlap (decl, newdecl)
749 if (newdecl == NULL_TREE
750 || ! types_overlap_p (TREE_TYPE (decl), TREE_TYPE (newdecl)))
753 for (field = decl; TREE_CHAIN (field) && TREE_CHAIN (field) != newdecl;
754 field = TREE_CHAIN (field))
757 DECL_SIZE (field) = integer_one_node;
762 /* Returns a list of fields to stand in for the base class subobjects
763 of REC. These fields are later removed by layout_basetypes. */
766 build_base_fields (rec)
769 /* Chain to hold all the new FIELD_DECLs which stand in for base class
771 tree base_decls = NULL_TREE;
772 tree binfos = TYPE_BINFO_BASETYPES (rec);
773 int n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
775 int i, saw_empty = 0;
776 unsigned int base_align = 0;
778 for (i = 0; i < n_baseclasses; ++i)
780 register tree base_binfo = TREE_VEC_ELT (binfos, i);
781 register tree basetype = BINFO_TYPE (base_binfo);
783 if (TYPE_SIZE (basetype) == 0)
784 /* This error is now reported in xref_tag, thus giving better
785 location information. */
788 if (TREE_VIA_VIRTUAL (base_binfo))
791 decl = build_lang_field_decl (FIELD_DECL, NULL_TREE, basetype);
792 DECL_ARTIFICIAL (decl) = 1;
793 DECL_FIELD_CONTEXT (decl) = DECL_CLASS_CONTEXT (decl) = rec;
794 DECL_SIZE (decl) = CLASSTYPE_SIZE (basetype);
795 DECL_ALIGN (decl) = CLASSTYPE_ALIGN (basetype);
796 TREE_CHAIN (decl) = base_decls;
801 /* Brain damage for backwards compatibility. For no good reason,
802 the old layout_basetypes made every base at least as large as
803 the alignment for the bases up to that point, gratuitously
804 wasting space. So we do the same thing here. */
805 base_align = MAX (base_align, DECL_ALIGN (decl));
807 = size_int (MAX (TREE_INT_CST_LOW (DECL_SIZE (decl)),
810 else if (DECL_SIZE (decl) == integer_zero_node)
814 /* Reverse the list of fields so we allocate the bases in the proper
816 base_decls = nreverse (base_decls);
818 /* In the presence of empty base classes, we run the risk of allocating
819 two objects of the same class on top of one another. Avoid that. */
820 if (flag_new_abi && saw_empty)
821 for (decl = base_decls; decl; decl = TREE_CHAIN (decl))
823 if (DECL_SIZE (decl) == integer_zero_node)
825 /* First step through the following bases until we find
826 an overlap or a non-empty base. */
827 for (nextdecl = TREE_CHAIN (decl); nextdecl;
828 nextdecl = TREE_CHAIN (nextdecl))
830 if (avoid_overlap (decl, nextdecl)
831 || DECL_SIZE (nextdecl) != integer_zero_node)
835 /* If we're still looking, also check against the first
837 for (nextdecl = TYPE_FIELDS (rec);
838 nextdecl && TREE_CODE (nextdecl) != FIELD_DECL;
839 nextdecl = TREE_CHAIN (nextdecl))
841 avoid_overlap (decl, nextdecl);
849 /* Returns list of virtual base class pointers in a FIELD_DECL chain. */
852 build_vbase_pointer_fields (rec)
855 /* Chain to hold all the new FIELD_DECLs which point at virtual
857 tree vbase_decls = NULL_TREE;
858 tree binfos = TYPE_BINFO_BASETYPES (rec);
859 int n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
863 /* Handle basetypes almost like fields, but record their
864 offsets differently. */
866 for (i = 0; i < n_baseclasses; i++)
868 register tree base_binfo = TREE_VEC_ELT (binfos, i);
869 register tree basetype = BINFO_TYPE (base_binfo);
871 if (TYPE_SIZE (basetype) == 0)
872 /* This error is now reported in xref_tag, thus giving better
873 location information. */
876 /* All basetypes are recorded in the association list of the
879 if (TREE_VIA_VIRTUAL (base_binfo))
884 /* The offset for a virtual base class is only used in computing
885 virtual function tables and for initializing virtual base
886 pointers. It is built once `get_vbase_types' is called. */
888 /* If this basetype can come from another vbase pointer
889 without an additional indirection, we will share
890 that pointer. If an indirection is involved, we
891 make our own pointer. */
892 for (j = 0; j < n_baseclasses; j++)
894 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
895 if (! TREE_VIA_VIRTUAL (other_base_binfo)
896 && binfo_member (basetype,
897 CLASSTYPE_VBASECLASSES (BINFO_TYPE
902 FORMAT_VBASE_NAME (name, basetype);
903 decl = build_lang_field_decl (FIELD_DECL, get_identifier (name),
904 build_pointer_type (basetype));
905 /* If you change any of the below, take a look at all the
906 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
908 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
909 DECL_VIRTUAL_P (decl) = 1;
910 DECL_ARTIFICIAL (decl) = 1;
911 DECL_FIELD_CONTEXT (decl) = rec;
912 DECL_CLASS_CONTEXT (decl) = rec;
913 DECL_FCONTEXT (decl) = basetype;
914 DECL_SAVED_INSNS (decl) = NULL_RTX;
915 DECL_FIELD_SIZE (decl) = 0;
916 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
917 TREE_CHAIN (decl) = vbase_decls;
918 BINFO_VPTR_FIELD (base_binfo) = decl;
922 /* The space this decl occupies has already been accounted for. */
930 /* Hashing of lists so that we don't make duplicates.
931 The entry point is `list_hash_canon'. */
933 /* Each hash table slot is a bucket containing a chain
934 of these structures. */
938 struct list_hash *next; /* Next structure in the bucket. */
939 int hashcode; /* Hash code of this list. */
940 tree list; /* The list recorded here. */
943 /* Now here is the hash table. When recording a list, it is added
944 to the slot whose index is the hash code mod the table size.
945 Note that the hash table is used for several kinds of lists.
946 While all these live in the same table, they are completely independent,
947 and the hash code is computed differently for each of these. */
949 #define TYPE_HASH_SIZE 59
950 static struct list_hash *list_hash_table[TYPE_HASH_SIZE];
952 /* Compute a hash code for a list (chain of TREE_LIST nodes
953 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
954 TREE_COMMON slots), by adding the hash codes of the individual entries. */
957 list_hash (purpose, value, chain)
958 tree purpose, value, chain;
960 register int hashcode = 0;
963 hashcode += TYPE_HASH (chain);
966 hashcode += TYPE_HASH (value);
970 hashcode += TYPE_HASH (purpose);
976 /* Look in the type hash table for a type isomorphic to TYPE.
977 If one is found, return it. Otherwise return 0. */
980 list_hash_lookup (hashcode, via_public, via_protected, via_virtual,
981 purpose, value, chain)
982 int hashcode, via_public, via_virtual, via_protected;
983 tree purpose, value, chain;
985 register struct list_hash *h;
987 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
988 if (h->hashcode == hashcode
989 && TREE_VIA_VIRTUAL (h->list) == via_virtual
990 && TREE_VIA_PUBLIC (h->list) == via_public
991 && TREE_VIA_PROTECTED (h->list) == via_protected
992 && TREE_PURPOSE (h->list) == purpose
993 && TREE_VALUE (h->list) == value
994 && TREE_CHAIN (h->list) == chain)
999 /* Add an entry to the list-hash-table
1000 for a list TYPE whose hash code is HASHCODE. */
1003 list_hash_add (hashcode, list)
1007 register struct list_hash *h;
1009 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
1010 h->hashcode = hashcode;
1012 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
1013 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
1016 /* Given TYPE, and HASHCODE its hash code, return the canonical
1017 object for an identical list if one already exists.
1018 Otherwise, return TYPE, and record it as the canonical object
1019 if it is a permanent object.
1021 To use this function, first create a list of the sort you want.
1022 Then compute its hash code from the fields of the list that
1023 make it different from other similar lists.
1024 Then call this function and use the value.
1025 This function frees the list you pass in if it is a duplicate. */
1027 /* Set to 1 to debug without canonicalization. Never set by program. */
1029 static int debug_no_list_hash = 0;
1032 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1033 int via_public, via_virtual, via_protected;
1034 tree purpose, value, chain;
1036 struct obstack *ambient_obstack = current_obstack;
1040 if (! debug_no_list_hash)
1042 hashcode = list_hash (purpose, value, chain);
1043 t = list_hash_lookup (hashcode, via_public, via_protected, via_virtual,
1044 purpose, value, chain);
1049 current_obstack = &class_obstack;
1051 t = tree_cons (purpose, value, chain);
1052 TREE_VIA_PUBLIC (t) = via_public;
1053 TREE_VIA_PROTECTED (t) = via_protected;
1054 TREE_VIA_VIRTUAL (t) = via_virtual;
1056 /* If this is a new list, record it for later reuse. */
1057 if (! debug_no_list_hash)
1058 list_hash_add (hashcode, t);
1060 current_obstack = ambient_obstack;
1064 /* Constructor for hashed lists. */
1067 hash_tree_chain (value, chain)
1070 return hash_tree_cons (0, 0, 0, NULL_TREE, value, chain);
1073 /* Similar, but used for concatenating two lists. */
1076 hash_chainon (list1, list2)
1083 if (TREE_CHAIN (list1) == NULL_TREE)
1084 return hash_tree_chain (TREE_VALUE (list1), list2);
1085 return hash_tree_chain (TREE_VALUE (list1),
1086 hash_chainon (TREE_CHAIN (list1), list2));
1090 get_identifier_list (value)
1093 tree list = IDENTIFIER_AS_LIST (value);
1094 if (list != NULL_TREE
1095 && (TREE_CODE (list) != TREE_LIST
1096 || TREE_VALUE (list) != value))
1098 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1099 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1100 && IDENTIFIER_TYPE_VALUE (value)
1101 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1103 tree type = IDENTIFIER_TYPE_VALUE (value);
1105 if (TYPE_PTRMEMFUNC_P (type))
1107 else if (type == current_class_type)
1108 /* Don't mess up the constructor name. */
1109 list = tree_cons (NULL_TREE, value, NULL_TREE);
1112 if (! CLASSTYPE_ID_AS_LIST (type))
1113 CLASSTYPE_ID_AS_LIST (type)
1114 = perm_tree_cons (NULL_TREE, TYPE_IDENTIFIER (type), NULL_TREE);
1115 list = CLASSTYPE_ID_AS_LIST (type);
1122 get_decl_list (value)
1125 tree list = NULL_TREE;
1127 if (TREE_CODE (value) == IDENTIFIER_NODE)
1128 list = get_identifier_list (value);
1129 else if (TREE_CODE (value) == RECORD_TYPE
1130 && TYPE_LANG_SPECIFIC (value)
1131 && value == TYPE_MAIN_VARIANT (value))
1132 list = CLASSTYPE_AS_LIST (value);
1134 if (list != NULL_TREE)
1136 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1140 return build_decl_list (NULL_TREE, value);
1143 /* Build an association between TYPE and some parameters:
1145 OFFSET is the offset added to `this' to convert it to a pointer
1148 BINFO is the base binfo to use, if we are deriving from one. This
1149 is necessary, as we want specialized parent binfos from base
1150 classes, so that the VTABLE_NAMEs of bases are for the most derived
1151 type, instead of the simple type.
1153 VTABLE is the virtual function table with which to initialize
1154 sub-objects of type TYPE.
1156 VIRTUALS are the virtual functions sitting in VTABLE. */
1159 make_binfo (offset, binfo, vtable, virtuals)
1161 tree vtable, virtuals;
1163 tree new_binfo = make_tree_vec (7);
1166 if (TREE_CODE (binfo) == TREE_VEC)
1167 type = BINFO_TYPE (binfo);
1171 binfo = CLASS_TYPE_P (type) ? TYPE_BINFO (binfo) : NULL_TREE;
1174 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1175 BINFO_OFFSET (new_binfo) = offset;
1176 BINFO_VTABLE (new_binfo) = vtable;
1177 BINFO_VIRTUALS (new_binfo) = virtuals;
1178 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
1180 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1181 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
1185 /* Return the binfo value for ELEM in TYPE. */
1188 binfo_value (elem, type)
1192 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1193 compiler_error ("base class `%s' ambiguous in binfo_value",
1194 TYPE_NAME_STRING (elem));
1196 return TYPE_BINFO (type);
1197 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1199 return get_binfo (elem, type, 0);
1202 /* Return a reversed copy of the BINFO-chain given by PATH. (If the
1203 BINFO_INHERITANCE_CHAIN points from base classes to derived
1204 classes, it will instead point from derived classes to base
1205 classes.) Returns the first node in the reversed chain. */
1211 register tree prev = NULL_TREE, cur;
1212 push_expression_obstack ();
1213 for (cur = path; cur; cur = BINFO_INHERITANCE_CHAIN (cur))
1215 tree r = copy_node (cur);
1216 BINFO_INHERITANCE_CHAIN (r) = prev;
1227 unsigned HOST_WIDE_INT n;
1230 fprintf (stderr, "type \"%s\"; offset = %ld\n",
1231 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1232 (long) TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1233 fprintf (stderr, "vtable type:\n");
1234 debug_tree (BINFO_TYPE (elem));
1235 if (BINFO_VTABLE (elem))
1236 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1238 fprintf (stderr, "no vtable decl yet\n");
1239 fprintf (stderr, "virtuals:\n");
1240 virtuals = BINFO_VIRTUALS (elem);
1242 n = skip_rtti_stuff (&virtuals);
1246 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1247 fprintf (stderr, "%s [%ld =? %ld]\n",
1248 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1249 (long) n, (long) TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1251 virtuals = TREE_CHAIN (virtuals);
1255 /* Initialize a CPLUS_BINDING node that does not live on an obstack. */
1259 struct tree_binding* node;
1261 static struct tree_binding* source;
1264 extern struct obstack permanent_obstack;
1265 push_obstacks (&permanent_obstack, &permanent_obstack);
1266 source = (struct tree_binding*)make_node (CPLUS_BINDING);
1270 TREE_PERMANENT ((tree)node) = 0;
1279 if (TREE_CODE (t) == FUNCTION_DECL)
1281 else if (TREE_CODE (t) == OVERLOAD)
1283 for (i=0; t; t = OVL_CHAIN (t))
1288 my_friendly_abort (359);
/* Returns non-zero if X denotes one or more (possibly overloaded)
   functions.  NOTE(review): return-type lines, parameter declarations
   and braces are elided throughout this fragment.  */
is_overloaded_fn (x)
  /* XXX A baselink is also considered an overloaded function.
     As is a placeholder from push_class_decls. */
  if (TREE_CODE (x) == TREE_LIST)
    /* Sanity-check the baselink shape: TREE_PURPOSE is a binfo path
       (TREE_VEC) or, for a placeholder, an identifier.  */
    my_friendly_assert (TREE_CODE (TREE_PURPOSE (x)) == TREE_VEC
			|| TREE_CODE (TREE_PURPOSE (x)) == IDENTIFIER_NODE,
  /* A lone function, a template-id, a function template, or an
     explicit OVERLOAD chain all count.  */
  return (TREE_CODE (x) == FUNCTION_DECL
	  || TREE_CODE (x) == TEMPLATE_ID_EXPR
	  || DECL_FUNCTION_TEMPLATE_P (x)
	  || TREE_CODE (x) == OVERLOAD);

/* Returns non-zero only if X is genuinely more than one candidate:
   an OVERLOAD with a successor, or one whose head is a template.  */
really_overloaded_fn (x)
  /* A baselink is also considered an overloaded function.
     This might also be an ambiguous class member. */
  if (TREE_CODE (x) == TREE_LIST)
  return (TREE_CODE (x) == OVERLOAD
	  && (TREE_CHAIN (x) != NULL_TREE
	      || DECL_FUNCTION_TEMPLATE_P (OVL_FUNCTION (x))));

/* Return the first function of the overload set FROM (get_first_fn,
   per the surrounding code; its declarator is elided).  */
  my_friendly_assert (is_overloaded_fn (from), 9);
  /* A baselink is also considered an overloaded function. */
  if (TREE_CODE (from) == TREE_LIST)
    from = TREE_VALUE (from);
  return OVL_CURRENT (from);
/* Return a new OVL node, concatenating it with the old one. */
/* DECL is the function to add; CHAIN is the existing OVERLOAD chain
   (possibly NULL_TREE).  The overload set as a whole has no single
   type, so the node is given unknown_type_node.  */
ovl_cons (decl, chain)
  tree result = make_node (OVERLOAD);
  TREE_TYPE (result) = unknown_type_node;
  OVL_FUNCTION (result) = decl;
  TREE_CHAIN (result) = chain;
/* Same as ovl_cons, but on the scratch_obstack. */
scratch_ovl_cons (value, chain)
  /* Temporarily redirect allocation to the expression obstack, build
     the node via ovl_cons, then restore the previous obstack.  */
  register struct obstack *ambient_obstack = current_obstack;
  extern struct obstack *expression_obstack;
  current_obstack = expression_obstack;
  node = ovl_cons (value, chain);
  current_obstack = ambient_obstack;
/* Build a new overloaded function. If this is the first one,
   just return it; otherwise, ovl_cons the _DECLs */
build_overload (decl, chain)
  /* Normalize CHAIN to an OVERLOAD node before consing DECL onto it.  */
  if (TREE_CODE (chain) != OVERLOAD)
    chain = ovl_cons (chain, NULL_TREE);
  return ovl_cons (decl, chain);
/* True if fn is in ovl. */
ovl_member (fn, ovl)
  if (ovl == NULL_TREE)
  /* A plain _DECL is compared directly; otherwise walk the OVERLOAD
     chain looking for a declaration that matches FN.  */
  if (TREE_CODE (ovl) != OVERLOAD)
    return decls_match (ovl, fn);
  for (; ovl; ovl = OVL_CHAIN (ovl))
    if (decls_match (OVL_FUNCTION (ovl), fn))
/* Returns non-zero if T1 and T2 are the same tree code and both are
   aggregate types.  */
is_aggr_type_2 (t1, t2)
  if (TREE_CODE (t1) != TREE_CODE (t2))
  return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
1406 #define PRINT_RING_SIZE 4
1409 lang_printable_name (decl, v)
1413 static tree decl_ring[PRINT_RING_SIZE];
1414 static char *print_ring[PRINT_RING_SIZE];
1415 static int ring_counter;
1418 /* Only cache functions. */
1420 || TREE_CODE (decl) != FUNCTION_DECL
1421 || DECL_LANG_SPECIFIC (decl) == 0)
1422 return lang_decl_name (decl, v);
1424 /* See if this print name is lying around. */
1425 for (i = 0; i < PRINT_RING_SIZE; i++)
1426 if (decl_ring[i] == decl)
1427 /* yes, so return it. */
1428 return print_ring[i];
1430 if (++ring_counter == PRINT_RING_SIZE)
1433 if (current_function_decl != NULL_TREE)
1435 if (decl_ring[ring_counter] == current_function_decl)
1437 if (ring_counter == PRINT_RING_SIZE)
1439 if (decl_ring[ring_counter] == current_function_decl)
1440 my_friendly_abort (106);
1443 if (print_ring[ring_counter])
1444 free (print_ring[ring_counter]);
1446 print_ring[ring_counter] = xstrdup (lang_decl_name (decl, v));
1447 decl_ring[ring_counter] = decl;
1448 return print_ring[ring_counter];
/* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
   listed in RAISES. */
build_exception_variant (type, raises)
  /* First search the existing variants of TYPE for one with matching
     qualifiers and an identical exception list.  */
  tree v = TYPE_MAIN_VARIANT (type);
  int type_quals = TYPE_QUALS (type);
  for (; v; v = TYPE_NEXT_VARIANT (v))
      if (TYPE_QUALS (v) != type_quals)
      /* @@ This should do set equality, not exact match. */
      if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
	/* List of exceptions raised matches previously found list.
	   @@ Nice to free up storage used in consing up the
	   @@ list of exceptions raised. */
  /* Need to build a new variant. */
  v = build_type_copy (type);
  /* Types live on the permanent obstack, so a non-permanent RAISES
     list must be copied there before being attached to the type.  */
  if (raises && ! TREE_PERMANENT (raises))
      push_obstacks_nochange ();
      end_temporary_allocation ();
      raises = copy_list (raises);
  TYPE_RAISES_EXCEPTIONS (v) = raises;
/* Given a TEMPLATE_TEMPLATE_PARM node T, create a new one together with its
   lang_specific field and its corresponding TEMPLATE_DECL node */
copy_template_template_parm (t)
  tree template = TYPE_NAME (t);
  tree t2 = make_lang_type (TEMPLATE_TEMPLATE_PARM);
  /* Copy the TEMPLATE_DECL (and its lang_decl), then cross-link the
     new decl and the new type.  */
  template = copy_node (template);
  copy_lang_decl (template);
  TREE_TYPE (template) = t2;
  TYPE_NAME (t2) = template;
  TYPE_STUB_DECL (t2) = template;

  /* No need to copy these */
  TYPE_FIELDS (t2) = TYPE_FIELDS (t);
  TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t2)
    = TEMPLATE_TEMPLATE_PARM_TEMPLATE_INFO (t);
1513 /* Walk through the tree structure T, applying func. If func ever returns
1514 non-null, return that value. */
1517 search_tree (t, func)
1519 tree (*func) PROTO((tree));
1521 #define TRY(ARG) if (tmp=search_tree (ARG, func), tmp != NULL_TREE) return tmp
1528 if (tmp = func (t), tmp != NULL_TREE)
1531 switch (TREE_CODE (t))
1536 case IDENTIFIER_NODE:
1543 case NAMESPACE_DECL:
1547 TRY (TREE_TYPE (t));
1551 TRY (TREE_TYPE (t));
1552 TRY (TREE_CHAIN (t));
1556 TRY (TREE_PURPOSE (t));
1557 TRY (TREE_VALUE (t));
1558 TRY (TREE_CHAIN (t));
1562 TRY (OVL_FUNCTION (t));
1563 TRY (OVL_CHAIN (t));
1568 int len = TREE_VEC_LENGTH (t);
1572 TRY (TREE_VEC_ELT (t, len));
1583 TRY (TREE_TYPE (t));
1588 case AGGR_INIT_EXPR:
1590 TRY (TREE_OPERAND (t, 0));
1591 TRY (TREE_OPERAND (t, 1));
1592 TRY (TREE_OPERAND (t, 2));
1599 case TRUNC_DIV_EXPR:
1600 case TRUNC_MOD_EXPR:
1608 case BIT_ANDTC_EXPR:
1609 case TRUTH_ANDIF_EXPR:
1610 case TRUTH_ORIF_EXPR:
1618 case FLOOR_DIV_EXPR:
1619 case ROUND_DIV_EXPR:
1621 case FLOOR_MOD_EXPR:
1622 case ROUND_MOD_EXPR:
1624 case PREDECREMENT_EXPR:
1625 case PREINCREMENT_EXPR:
1626 case POSTDECREMENT_EXPR:
1627 case POSTINCREMENT_EXPR:
1630 case TRY_CATCH_EXPR:
1631 case WITH_CLEANUP_EXPR:
1633 TRY (TREE_OPERAND (t, 0));
1634 TRY (TREE_OPERAND (t, 1));
1643 case TRUTH_NOT_EXPR:
1645 case NON_LVALUE_EXPR:
1647 case CLEANUP_POINT_EXPR:
1651 TRY (TREE_OPERAND (t, 0));
1656 case REINTERPRET_CAST_EXPR:
1657 case CONST_CAST_EXPR:
1658 case STATIC_CAST_EXPR:
1659 case DYNAMIC_CAST_EXPR:
1666 TRY (TREE_REALPART (t));
1667 TRY (TREE_IMAGPART (t));
1671 TRY (CONSTRUCTOR_ELTS (t));
1674 case TEMPLATE_TEMPLATE_PARM:
1675 case TEMPLATE_PARM_INDEX:
1676 case TEMPLATE_TYPE_PARM:
1693 case REFERENCE_TYPE:
1694 TRY (TREE_TYPE (t));
1699 TRY (TREE_TYPE (t));
1700 TRY (TYPE_ARG_TYPES (t));
1704 TRY (TREE_TYPE (t));
1705 TRY (TYPE_DOMAIN (t));
1709 TRY (TYPE_MAX_VALUE (t));
1713 TRY (TREE_TYPE (t));
1714 TRY (TYPE_OFFSET_BASETYPE (t));
1718 if (TYPE_PTRMEMFUNC_P (t))
1719 TRY (TYPE_PTRMEMFUNC_FN_TYPE (t));
1722 /* This list is incomplete, but should suffice for now.
1723 It is very important that `sorry' not call
1724 `report_error_function'. That could cause an infinite loop. */
1726 sorry ("initializer contains unrecognized tree code");
1727 return error_mark_node;
1736 /* Passed to search_tree. Checks for the use of types with no linkage. */
1739 no_linkage_helper (t)
1743 && (IS_AGGR_TYPE (t) || TREE_CODE (t) == ENUMERAL_TYPE)
1744 && (decl_function_context (TYPE_MAIN_DECL (t))
1745 || ANON_AGGRNAME_P (TYPE_IDENTIFIER (t))))
1750 /* Check if the type T depends on a type with no linkage and if so, return
1754 no_linkage_check (t)
1757 t = search_tree (t, no_linkage_helper);
1758 if (t != error_mark_node)
1764 /* Subroutine of copy_to_permanent
1766 Assuming T is a node build bottom-up, make it all exist on
1767 permanent obstack, if it is not permanent already. */
1772 tree (*func) PROTO((tree));
1779 if (tmp = func (t), tmp != NULL_TREE)
1782 switch (TREE_CODE (t))
1785 return error_mark_node;
1790 /* Rather than aborting, return error_mark_node. This allows us
1791 to report a sensible error message on code like this:
1793 void g() { int i; f<i>(7); }
1797 void g() { const int i = 7; f<i>(7); }
1799 however, we must actually return the constant initializer. */
1800 tmp = decl_constant_value (t);
1802 return mapcar (tmp, func);
1804 return error_mark_node;
1808 tree chain = TREE_CHAIN (t);
1810 TREE_CHAIN (t) = mapcar (chain, func);
1811 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1812 DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
1813 DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
1819 tree chain = TREE_CHAIN (t);
1821 TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
1822 TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
1823 TREE_CHAIN (t) = mapcar (chain, func);
1829 tree chain = OVL_CHAIN (t);
1831 OVL_FUNCTION (t) = mapcar (OVL_FUNCTION (t), func);
1832 OVL_CHAIN (t) = mapcar (chain, func);
1838 int len = TREE_VEC_LENGTH (t);
1842 TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
1849 return copy_node (t);
1853 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1854 PTRMEM_CST_MEMBER (t) = mapcar (PTRMEM_CST_MEMBER (t), func);
1859 case AGGR_INIT_EXPR:
1861 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1862 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1863 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
1868 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1875 case TRUNC_DIV_EXPR:
1876 case TRUNC_MOD_EXPR:
1884 case BIT_ANDTC_EXPR:
1885 case TRUTH_ANDIF_EXPR:
1886 case TRUTH_ORIF_EXPR:
1894 case FLOOR_DIV_EXPR:
1895 case ROUND_DIV_EXPR:
1897 case FLOOR_MOD_EXPR:
1898 case ROUND_MOD_EXPR:
1900 case PREDECREMENT_EXPR:
1901 case PREINCREMENT_EXPR:
1902 case POSTDECREMENT_EXPR:
1903 case POSTINCREMENT_EXPR:
1906 case TRY_CATCH_EXPR:
1907 case WITH_CLEANUP_EXPR:
1909 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1910 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1915 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1916 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1917 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1919 /* tree.def says that operand two is RTL, but
1920 make_call_declarator puts trees in there. */
1921 if (TREE_OPERAND (t, 2)
1922 && TREE_CODE (TREE_OPERAND (t, 2)) == TREE_LIST)
1923 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
1925 TREE_OPERAND (t, 2) = NULL_TREE;
1933 case TRUTH_NOT_EXPR:
1936 case CLEANUP_POINT_EXPR:
1938 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1942 tmp = build_pointer_type (mapcar (TREE_TYPE (t), func));
1943 return cp_build_qualified_type (tmp, TYPE_QUALS (t));
1944 case REFERENCE_TYPE:
1945 tmp = build_reference_type (mapcar (TREE_TYPE (t), func));
1946 return cp_build_qualified_type (tmp, TYPE_QUALS (t));
1948 tmp = build_function_type (mapcar (TREE_TYPE (t), func),
1949 mapcar (TYPE_ARG_TYPES (t), func));
1950 return cp_build_qualified_type (tmp, TYPE_QUALS (t));
1952 tmp = build_cplus_array_type (mapcar (TREE_TYPE (t), func),
1953 mapcar (TYPE_DOMAIN (t), func));
1954 return cp_build_qualified_type (tmp, CP_TYPE_QUALS (t));
1956 tmp = build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
1957 return cp_build_qualified_type (tmp, TYPE_QUALS (t));
1959 tmp = build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
1960 mapcar (TREE_TYPE (t), func));
1961 return cp_build_qualified_type (tmp, TYPE_QUALS (t));
1963 tmp = build_cplus_method_type
1964 (mapcar (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (t))), func),
1965 mapcar (TREE_TYPE (t), func),
1966 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func));
1967 return cp_build_qualified_type (tmp, TYPE_QUALS (t));
1971 TREE_REALPART (t) = mapcar (TREE_REALPART (t), func);
1972 TREE_IMAGPART (t) = mapcar (TREE_REALPART (t), func);
1977 CONSTRUCTOR_ELTS (t) = mapcar (CONSTRUCTOR_ELTS (t), func);
1980 case TEMPLATE_TEMPLATE_PARM:
1981 return copy_template_template_parm (t);
1985 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1986 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1987 TREE_OPERAND (t, 2) = NULL_TREE;
1992 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1993 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1994 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
1999 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
2003 if (TYPE_PTRMEMFUNC_P (t))
2004 return build_ptrmemfunc_type
2005 (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
2006 /* else fall through */
2008 /* This list is incomplete, but should suffice for now.
2009 It is very important that `sorry' not call
2010 `report_error_function'. That could cause an infinite loop. */
2012 sorry ("initializer contains unrecognized tree code");
2013 return error_mark_node;
2016 my_friendly_abort (107);
2025 if (TREE_PERMANENT (t))
2028 /* Support `void f () { extern int i; A<&i> a; }' */
2029 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
2034 /* copy_rtx won't make a new SYMBOL_REF, so call make_decl_rtl again. */
2036 make_decl_rtl (t, NULL_PTR, 1);
2043 /* Assuming T is a node built bottom-up, make it all exist on
2044 permanent obstack, if it is not permanent already. */
2047 copy_to_permanent (t)
2050 if (t == NULL_TREE || TREE_PERMANENT (t))
2053 push_obstacks_nochange ();
2054 end_temporary_allocation ();
2056 t = mapcar (t, perm_manip);
2063 #ifdef GATHER_STATISTICS
2064 extern int depth_reached;
2068 print_lang_statistics ()
2070 extern struct obstack decl_obstack;
2071 print_obstack_statistics ("class_obstack", &class_obstack);
2072 print_obstack_statistics ("decl_obstack", &decl_obstack);
2073 print_search_statistics ();
2074 print_class_statistics ();
2075 #ifdef GATHER_STATISTICS
2076 fprintf (stderr, "maximum template instantiation depth reached: %d\n",
2081 /* This is used by the `assert' macro. It is provided in libgcc.a,
2082 which `cc' doesn't know how to link. Note that the C++ front-end
2083 no longer actually uses the `assert' macro (instead, it calls
2084 my_friendly_assert). But all of the back-end files still need this. */
2087 __eprintf (string, expression, line, filename)
2090 const char *expression;
2092 const char *filename;
2100 fprintf (stderr, string, expression, line, filename);
/* Return, as an INTEGER_CST node, the number of elements for TYPE
   (which is an ARRAY_TYPE). This counts only elements of the top
   array. */
array_type_nelts_top (type)
  /* array_type_nelts yields the maximum index; the elided argument
     (presumably integer_one_node) turns that into a count.  */
  return fold (build (PLUS_EXPR, sizetype,
		      array_type_nelts (type),

/* Return, as an INTEGER_CST node, the number of elements for TYPE
   (which is an ARRAY_TYPE). This one is a recursive count of all
   ARRAY_TYPEs that are clumped together. */
array_type_nelts_total (type)
  tree sz = array_type_nelts_top (type);
  /* Multiply in the element count of every nested array type.  */
  type = TREE_TYPE (type);
  while (TREE_CODE (type) == ARRAY_TYPE)
      tree n = array_type_nelts_top (type);
      sz = fold (build (MULT_EXPR, sizetype, sz, n));
      type = TREE_TYPE (type);
  /* Per the forward declarations this is presumably `bot_manip', the
     callback passed to mapcar by break_out_target_exprs; its
     declarator is elided in this fragment.  */
  /* Trees without side effects (other than TREE_LISTs) need no work.  */
  if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
  else if (TREE_CODE (t) == TARGET_EXPR)
      if (TREE_CODE (TREE_OPERAND (t, 1)) == AGGR_INIT_EXPR)
	  /* The initializer is a call: mark the callee used and wrap a
	     copy of the initializer in a fresh cplus_new.  */
	  mark_used (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 1), 0), 0));
	  return build_cplus_new
	    (TREE_TYPE (t), break_out_target_exprs (TREE_OPERAND (t, 1)));
      /* Otherwise give the TARGET_EXPR a fresh temporary slot.  */
      TREE_OPERAND (t, 0) = build (VAR_DECL, TREE_TYPE (t));
      layout_decl (TREE_OPERAND (t, 0), 0);
  else if (TREE_CODE (t) == CALL_EXPR)
    mark_used (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
2163 /* Actually, we'll just clean out the target exprs for the moment. */
2166 break_out_target_exprs (t)
2169 return mapcar (t, bot_manip);
2172 /* Obstack used for allocating nodes in template function and variable
2175 /* Similar to `build_nt', except we build
2176 on the permanent_obstack, regardless. */
2179 build_min_nt VPROTO((enum tree_code code, ...))
2182 enum tree_code code;
2184 register struct obstack *ambient_obstack = expression_obstack;
2187 register int length;
2193 code = va_arg (p, enum tree_code);
2196 expression_obstack = &permanent_obstack;
2198 t = make_node (code);
2199 length = tree_code_length[(int) code];
2200 TREE_COMPLEXITY (t) = lineno;
2202 for (i = 0; i < length; i++)
2204 tree x = va_arg (p, tree);
2205 TREE_OPERAND (t, i) = copy_to_permanent (x);
2209 expression_obstack = ambient_obstack;
2213 /* Similar to `build', except we build
2214 on the permanent_obstack, regardless. */
2217 build_min VPROTO((enum tree_code code, tree tt, ...))
2220 enum tree_code code;
2223 register struct obstack *ambient_obstack = expression_obstack;
2226 register int length;
2232 code = va_arg (p, enum tree_code);
2233 tt = va_arg (p, tree);
2236 expression_obstack = &permanent_obstack;
2238 t = make_node (code);
2239 length = tree_code_length[(int) code];
2240 TREE_TYPE (t) = copy_to_permanent (tt);
2241 TREE_COMPLEXITY (t) = lineno;
2243 for (i = 0; i < length; i++)
2245 tree x = va_arg (p, tree);
2246 TREE_OPERAND (t, i) = copy_to_permanent (x);
2250 expression_obstack = ambient_obstack;
/* Same as `tree_cons' but make a permanent object. */
min_tree_cons (purpose, value, chain)
     tree purpose, value, chain;
  register struct obstack *ambient_obstack = current_obstack;
  current_obstack = &permanent_obstack;
  /* PURPOSE and VALUE are copied to the permanent obstack; CHAIN is
     used as given — presumably callers pass a permanent chain.  */
  node = tree_cons (copy_to_permanent (purpose),
		    copy_to_permanent (value), chain);
  current_obstack = ambient_obstack;
  /* Return the TYPE_DECL for T, whether T is already a TYPE_DECL or a
     type node with a stub decl; aborts on anything else.  The
     declarator for this definition is elided in this fragment.  */
  if (TREE_CODE (t) == TYPE_DECL)
  if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
    return TYPE_STUB_DECL (t);
  my_friendly_abort (42);
/* Stop compiler from complaining control reaches end of non-void function. */
/* Return non-zero if node T, allocated on OBSTACK, sits at the top of
   the obstack and could therefore be freed.  */
can_free (obstack, t)
     struct obstack *obstack;
  /* Compute the allocated size of T; only the TREE_VEC case is
     visible in this fragment.  */
  if (TREE_CODE (t) == TREE_VEC)
    size = (TREE_VEC_LENGTH (t)-1) * sizeof (tree) + sizeof (struct tree_vec);
  my_friendly_abort (42);
  /* Round SIZE up to the obstack's alignment, then check whether T
     plus its rounded size reaches the obstack's free pointer, i.e. T
     was the most recent allocation.  */
#define ROUND(x) ((x + obstack_alignment_mask (obstack)) \
		  & ~ obstack_alignment_mask (obstack))
  if ((char *)t + ROUND (size) == obstack_next_free (obstack))
/* Return first vector element whose BINFO_TYPE is ELEM.
   Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */
vec_binfo_member (elem, vec)
  /* Linear search of the binfo vector, comparing types.  */
  for (i = 0; i < TREE_VEC_LENGTH (vec); ++i)
    if (same_type_p (elem, BINFO_TYPE (TREE_VEC_ELT (vec, i))))
      return TREE_VEC_ELT (vec, i);
/* Kludge around the fact that DECL_CONTEXT for virtual functions returns
   the wrong thing for decl_function_context. Hopefully the uses in the
   backend won't matter, since we don't need a static chain for local class
   methods.  */
hack_decl_function_context (decl)
  /* For a member function, derive the containing function from the
     class's main decl instead of from the function itself.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_FUNCTION_MEMBER_P (decl))
    return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl)));
  return decl_function_context (decl);
2337 /* Return truthvalue of whether T1 is the same tree structure as T2.
2338 Return 1 if they are the same.
2339 Return 0 if they are understandably different.
2340 Return -1 if either contains tree structure not understood by
2344 cp_tree_equal (t1, t2)
2347 register enum tree_code code1, code2;
2352 if (t1 == 0 || t2 == 0)
2355 code1 = TREE_CODE (t1);
2356 code2 = TREE_CODE (t2);
2358 if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR)
2360 if (code2 == NOP_EXPR || code2 == CONVERT_EXPR || code2 == NON_LVALUE_EXPR)
2361 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2363 return cp_tree_equal (TREE_OPERAND (t1, 0), t2);
2365 else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
2366 || code2 == NON_LVALUE_EXPR)
2367 return cp_tree_equal (t1, TREE_OPERAND (t2, 0));
2375 return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
2376 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2);
2379 return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
2382 return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
2383 && !bcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
2384 TREE_STRING_LENGTH (t1));
2387 /* We need to do this when determining whether or not two
2388 non-type pointer to member function template arguments
2390 if (!(same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
2391 /* The first operand is RTL. */
2392 && TREE_OPERAND (t1, 0) == TREE_OPERAND (t2, 0)))
2394 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2397 cmp = cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2));
2400 cmp = cp_tree_equal (TREE_VALUE (t1), TREE_VALUE (t2));
2403 return cp_tree_equal (TREE_CHAIN (t1), TREE_CHAIN (t2));
2406 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2409 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2412 return simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2415 /* Special case: if either target is an unallocated VAR_DECL,
2416 it means that it's going to be unified with whatever the
2417 TARGET_EXPR is really supposed to initialize, so treat it
2418 as being equivalent to anything. */
2419 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
2420 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
2421 && DECL_RTL (TREE_OPERAND (t1, 0)) == 0)
2422 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
2423 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
2424 && DECL_RTL (TREE_OPERAND (t2, 0)) == 0))
2427 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2430 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2432 case WITH_CLEANUP_EXPR:
2433 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2436 return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t1, 2));
2439 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
2440 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2449 case TEMPLATE_PARM_INDEX:
2450 return TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2)
2451 && TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2);
2455 if (TREE_CODE (TREE_OPERAND (t1, 0)) != TREE_CODE (TREE_OPERAND (t2, 0)))
2457 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t1, 0))) == 't')
2458 return same_type_p (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2462 /* Two pointer-to-members are the same if they point to the same
2463 field or function in the same class. */
2464 return (PTRMEM_CST_MEMBER (t1) == PTRMEM_CST_MEMBER (t2)
2465 && same_type_p (PTRMEM_CST_CLASS (t1), PTRMEM_CST_CLASS (t2)));
2471 switch (TREE_CODE_CLASS (code1))
2481 for (i=0; i<tree_code_length[(int) code1]; ++i)
2483 cmp = cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));
2493 /* Similar to make_tree_vec, but build on the momentary_obstack.
2494 Thus, these vectors are really and truly temporary. */
2501 push_expression_obstack ();
2502 node = make_tree_vec (len);
/* Build a wrapper around some pointer PTR so we can use it as a tree. */
build_ptr_wrapper (ptr)
  tree t = make_node (WRAPPER);
  WRAPPER_PTR (t) = ptr;

/* Same, but on the expression_obstack. */
build_expr_ptr_wrapper (ptr)
  /* Allocate the wrapper on the expression obstack; the matching pop
     is elided in this fragment.  */
  push_expression_obstack ();
  t = build_ptr_wrapper (ptr);

/* Build a wrapper around some integer I so we can use it as a tree. */
build_int_wrapper (i)
  tree t = make_node (WRAPPER);
  WRAPPER_INT (t) = i;
/* Build a SRCLOC node recording source position FILE/LINE.  */
build_srcloc (file, line)
  /* Make sure that we put these on the permanent obstack; up in
     add_pending_template, we pass this return value into perm_tree_cons,
     which also puts it on the permanent_obstack. However, this wasn't
     explicitly doing the same. */
  register struct obstack *ambient_obstack = current_obstack;
  current_obstack = &permanent_obstack;
  t = make_node (SRCLOC);
  SRCLOC_FILE (t) = file;
  SRCLOC_LINE (t) = line;
  current_obstack = ambient_obstack;

/* Convenience wrapper: a SRCLOC for the current input position.  */
build_srcloc_here ()
  return build_srcloc (input_filename, lineno);
/* Redirect current allocation to the expression obstack; callers are
   expected to undo this with the matching pop.  */
push_expression_obstack ()
  push_obstacks_nochange ();
  current_obstack = expression_obstack;
/* The type of ARG when used as an lvalue. */
  tree type = TREE_TYPE (arg);
  /* An overload set has no single type.  */
  if (TREE_CODE (arg) == OVERLOAD)
    type = unknown_type_node;

/* The type of ARG for printing error messages; denote lvalues with
   reference types (this comment's tail is elided in the fragment —
   TODO confirm wording against the full source).  */
  tree type = TREE_TYPE (arg);
  if (TREE_CODE (type) == ARRAY_TYPE)
  else if (real_lvalue_p (arg))
    /* Show lvalues as `T&' in diagnostics.  */
    type = build_reference_type (lvalue_type (arg));
  else if (IS_AGGR_TYPE (type))
    type = lvalue_type (arg);
/* Does FUNCTION use a variable-length argument list? */
varargs_function_p (function)
  tree parm = TYPE_ARG_TYPES (TREE_TYPE (function));
  /* A parameter list terminated by void_type_node is fixed-arity;
     the return statements are elided in this fragment.  */
  for (; parm; parm = TREE_CHAIN (parm))
    if (TREE_VALUE (parm) == void_type_node)

/* Returns 1 if decl is a member of a class. */
  tree ctx = DECL_CONTEXT (decl);
  return (ctx && TREE_CODE_CLASS (TREE_CODE (ctx)) == 't');
/* Create a placeholder for member access where we don't actually have an
   object that the access is against. */
build_dummy_object (type)
  /* A NOP_EXPR of error_mark_node with type `TYPE *', dereferenced;
     is_dummy_object recognizes exactly this shape.  */
  tree decl = build1 (NOP_EXPR, build_pointer_type (type), error_mark_node);
  return build_indirect_ref (decl, NULL_PTR);
/* We've gotten a reference to a member of TYPE. Return *this if appropriate,
   or a dummy object otherwise. If BINFOP is non-0, it is filled with the
   binfo path from current_class_type to TYPE, or 0. */
maybe_dummy_object (type, binfop)
  /* Prefer the current class if TYPE is reachable as one of its bases
     (get_base_distance also fills *BINFOP with the path).  */
  if (current_class_type
      && get_base_distance (type, current_class_type, 0, binfop) != -1)
    context = current_class_type;
      /* Reference from a nested class member function. */
      *binfop = TYPE_BINFO (type);
  /* Use the real `*this' when we have one of the right type; otherwise
     fabricate a placeholder object.  */
  if (current_class_ref && context == current_class_type)
    decl = current_class_ref;
    decl = build_dummy_object (context);
/* Returns 1 if OB is a placeholder object, or a pointer to one. */
is_dummy_object (ob)
  /* Strip one dereference, then look for the NOP_EXPR-of-error_mark_node
     shape produced by build_dummy_object.  */
  if (TREE_CODE (ob) == INDIRECT_REF)
    ob = TREE_OPERAND (ob, 0);
  return (TREE_CODE (ob) == NOP_EXPR
	  && TREE_OPERAND (ob, 0) == error_mark_node);
2684 /* Returns 1 iff type T is a POD type, as defined in [basic.types]. */
2692 while (TREE_CODE (t) == ARRAY_TYPE)
2695 if (! IS_AGGR_TYPE (t))
2698 if (CLASSTYPE_NON_AGGREGATE (t)
2699 || TYPE_HAS_COMPLEX_ASSIGN_REF (t)
2700 || TYPE_HAS_DESTRUCTOR (t))
2703 for (f = TYPE_FIELDS (t); f; f = TREE_CHAIN (f))
2705 if (TREE_CODE (f) != FIELD_DECL)
2708 if (TREE_CODE (TREE_TYPE (f)) == REFERENCE_TYPE
2709 || TYPE_PTRMEMFUNC_P (TREE_TYPE (f))
2710 || TYPE_PTRMEM_P (TREE_TYPE (f)))