1 /* Language-dependent node constructors for parse phase of GNU compiler.
2 Copyright (C) 1987, 88, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
3 Hacked by Michael Tiemann (tiemann@cygnus.com)
5 This file is part of GNU CC.
7 GNU CC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 2, or (at your option)
12 GNU CC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GNU CC; see the file COPYING. If not, write to
19 the Free Software Foundation, 59 Temple Place - Suite 330,
20 Boston, MA 02111-1307, USA. */
/* CEIL(x,y): divide X by Y, rounding up.  Both arguments are
   evaluated more than once, so avoid side effects.  */
35 #define CEIL(x,y) (((x) + (y) - 1) / (y))
/* NOTE(review): incomplete excerpt -- the function header, opening
   brace, returns and several case labels are missing from this
   listing.  The recursive calls below indicate this is
   real_lvalue_p (ref).  */
37 /* Return nonzero if REF is an lvalue valid for this language.
38 Lvalues can be assigned, unless they have TREE_READONLY.
39 Lvalues can have their address taken, unless they have DECL_REGISTER. */
45 if (! language_lvalue_valid (ref))
/* Expressions of REFERENCE_TYPE always denote lvalues.  */
48 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
/* `this' is only an lvalue when -fthis-is-variable is in effect.  */
51 if (ref == current_class_decl && flag_this_is_variable <= 0)
54 switch (TREE_CODE (ref))
56 /* preincrements and predecrements are valid lvals, provided
57 what they refer to are valid lvals. */
58 case PREINCREMENT_EXPR:
59 case PREDECREMENT_EXPR:
62 return real_lvalue_p (TREE_OPERAND (ref, 0));
/* NOTE(review): presumably a decl case -- read-only non-static
   members declared inside an aggregate are rejected; the case label
   is not visible here, confirm against the full source.  */
68 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
69 && DECL_LANG_SPECIFIC (ref)
70 && DECL_IN_AGGR_P (ref))
77 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
78 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
82 /* A currently unresolved scope ref. */
84 my_friendly_abort (103);
86 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
88 return real_lvalue_p (TREE_OPERAND (ref, 0))
89 && real_lvalue_p (TREE_OPERAND (ref, 1));
/* Operands 1 and 2 checked together: looks like the COND_EXPR arm
   test (lvalue only if both arms are) -- case label not visible.  */
93 return (real_lvalue_p (TREE_OPERAND (ref, 1))
94 && real_lvalue_p (TREE_OPERAND (ref, 2)));
100 return real_lvalue_p (TREE_OPERAND (ref, 1));
104 return (real_lvalue_p (TREE_OPERAND (ref, 0))
105 && real_lvalue_p (TREE_OPERAND (ref, 1)));
/* NOTE(review): incomplete excerpt of lvalue_p (ref) -- a near-twin
   of real_lvalue_p above, differing only in the extra IS_AGGR_TYPE
   acceptance below.  Header, returns and case labels are missing
   from this listing.  */
115 if (! language_lvalue_valid (ref))
118 if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
121 if (ref == current_class_decl && flag_this_is_variable <= 0)
124 switch (TREE_CODE (ref))
126 /* preincrements and predecrements are valid lvals, provided
127 what they refer to are valid lvals. */
128 case PREINCREMENT_EXPR:
129 case PREDECREMENT_EXPR:
132 return lvalue_p (TREE_OPERAND (ref, 0));
138 if (TREE_READONLY (ref) && ! TREE_STATIC (ref)
139 && DECL_LANG_SPECIFIC (ref)
140 && DECL_IN_AGGR_P (ref))
147 if (TREE_CODE (TREE_TYPE (ref)) != FUNCTION_TYPE
148 && TREE_CODE (TREE_TYPE (ref)) != METHOD_TYPE)
/* Unlike real_lvalue_p, class rvalues are accepted here --
   presumably because their address can be taken.  */
156 if (IS_AGGR_TYPE (TREE_TYPE (ref)))
160 /* A currently unresolved scope ref. */
162 my_friendly_abort (103);
164 if (TREE_CODE (TREE_OPERAND (ref, 1)) == FUNCTION_DECL)
166 return lvalue_p (TREE_OPERAND (ref, 0))
167 && lvalue_p (TREE_OPERAND (ref, 1));
171 return (lvalue_p (TREE_OPERAND (ref, 1))
172 && lvalue_p (TREE_OPERAND (ref, 2)));
178 return lvalue_p (TREE_OPERAND (ref, 1));
182 return (lvalue_p (TREE_OPERAND (ref, 0))
183 && lvalue_p (TREE_OPERAND (ref, 1)));
/* NOTE(review): incomplete excerpt -- return type, locals and the
   final return of WIN are missing from this listing.  */
189 /* Return nonzero if REF is an lvalue valid for this language;
190 otherwise, print an error message and return zero. */
193 lvalue_or_else (ref, string)
/* STRING names the construct, for the diagnostic below.  */
197 int win = lvalue_p (ref);
199 error ("non-lvalue in %s", string);
/* NOTE(review): incomplete excerpt -- declarations of SLOT/RVAL and
   the final return are missing from this listing.  */
203 /* INIT is a CALL_EXPR which needs info about its target.
204 TYPE is the type that this initialization should appear to have.
206 Build an encapsulation of the initialization to perform
207 and return it so that it can be processed by language-independent
208 and language-specific expression expanders. */
210 build_cplus_new (type, init)
/* SLOT is the temporary object being constructed into.  */
217 slot = build (VAR_DECL, type);
218 layout_decl (slot, 0);
/* Rebuild the call as a NEW_EXPR carrying the target slot.  */
219 rval = build (NEW_EXPR, type,
220 TREE_OPERAND (init, 0), TREE_OPERAND (init, 1), slot);
221 TREE_SIDE_EFFECTS (rval) = 1;
222 TREE_ADDRESSABLE (rval) = 1;
/* Wrap in a TARGET_EXPR so the temporary's cleanup is managed.  */
223 rval = build (TARGET_EXPR, type, slot, rval, 0);
224 TREE_SIDE_EFFECTS (rval) = 1;
225 TREE_ADDRESSABLE (rval) = 1;
/* NOTE(review): incomplete excerpt -- TMP's initialization from EXP,
   braces and the final return are missing from this listing.  */
230 /* Recursively search EXP for CALL_EXPRs that need cleanups and replace
231 these CALL_EXPRs with tree nodes that will perform the cleanups. */
234 break_out_cleanups (exp)
/* Direct hit: the expression itself is a call returning a type
   with a destructor.  */
239 if (TREE_CODE (tmp) == CALL_EXPR
240 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (tmp)))
241 return build_cplus_new (TREE_TYPE (tmp), tmp);
/* Otherwise peel conversion wrappers looking for a buried call.  */
243 while (TREE_CODE (tmp) == NOP_EXPR
244 || TREE_CODE (tmp) == CONVERT_EXPR
245 || TREE_CODE (tmp) == NON_LVALUE_EXPR)
247 if (TREE_CODE (TREE_OPERAND (tmp, 0)) == CALL_EXPR
248 && TYPE_NEEDS_DESTRUCTOR (TREE_TYPE (TREE_OPERAND (tmp, 0))))
250 TREE_OPERAND (tmp, 0)
251 = build_cplus_new (TREE_TYPE (TREE_OPERAND (tmp, 0)),
252 TREE_OPERAND (tmp, 0));
256 tmp = TREE_OPERAND (tmp, 0);
/* NOTE(review): incomplete excerpt -- braces, fall-through returns
   and the declaration of I are missing from this listing.  */
261 /* Recursively perform a preorder search EXP for CALL_EXPRs, making
262 copies where they are found. Returns a deep copy all nodes transitively
263 containing CALL_EXPRs. */
266 break_out_calls (exp)
269 register tree t1, t2;
270 register enum tree_code code;
271 register int changed = 0;
274 if (exp == NULL_TREE)
277 code = TREE_CODE (exp);
279 if (code == CALL_EXPR)
280 return copy_node (exp);
282 /* Don't try and defeat a save_expr, as it should only be done once. */
283 if (code == SAVE_EXPR)
/* Dispatch on the node's class to decide how deep to walk.  */
286 switch (TREE_CODE_CLASS (code))
291 case 'c': /* a constant */
292 case 't': /* a type node */
293 case 'x': /* something random, like an identifier or an ERROR_MARK. */
296 case 'd': /* A decl node */
297 #if 0 /* This is bogus. jason 9/21/94 */
299 t1 = break_out_calls (DECL_INITIAL (exp));
300 if (t1 != DECL_INITIAL (exp))
302 exp = copy_node (exp);
303 DECL_INITIAL (exp) = t1;
308 case 'b': /* A block node */
310 /* Don't know how to handle these correctly yet. Must do a
311 break_out_calls on all DECL_INITIAL values for local variables,
312 and also break_out_calls on all sub-blocks and sub-statements. */
317 case 'e': /* an expression */
318 case 'r': /* a reference */
319 case 's': /* an expression with side effects */
/* Copy-on-write: only copy EXP once the first changed operand
   is seen.  */
320 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
322 t1 = break_out_calls (TREE_OPERAND (exp, i));
323 if (t1 != TREE_OPERAND (exp, i))
325 exp = copy_node (exp);
326 TREE_OPERAND (exp, i) = t1;
331 case '<': /* a comparison expression */
332 case '2': /* a binary arithmetic expression */
333 t2 = break_out_calls (TREE_OPERAND (exp, 1));
334 if (t2 != TREE_OPERAND (exp, 1))
/* Note: '1' deliberately falls through from the binary case so
   operand 0 is handled in one place.  */
336 case '1': /* a unary arithmetic expression */
337 t1 = break_out_calls (TREE_OPERAND (exp, 0));
338 if (t1 != TREE_OPERAND (exp, 0))
342 if (tree_code_length[(int) code] == 1)
343 return build1 (code, TREE_TYPE (exp), t1);
345 return build (code, TREE_TYPE (exp), t1, t2);
/* Obstacks owned by the tree allocator elsewhere in the compiler;
   several routines below temporarily retarget allocation to them.  */
352 extern struct obstack *current_obstack;
353 extern struct obstack permanent_obstack, class_obstack;
354 extern struct obstack *saveable_obstack;
356 /* Here is how primitive or already-canonicalized types' hash
357 codes are made. MUST BE CONSISTENT WITH tree.c !!! */
358 #define TYPE_HASH(TYPE) ((HOST_WIDE_INT) (TYPE) & 0777777)
/* NOTE(review): incomplete excerpt -- locals T/PTYPE/HASHCODE, the
   else keyword before line 381, layout call and return are missing
   from this listing.  */
360 /* Construct, lay out and return the type of methods belonging to class
361 BASETYPE and whose arguments are described by ARGTYPES and whose values
362 are described by RETTYPE. If each type exists already, reuse it. */
364 build_cplus_method_type (basetype, rettype, argtypes)
365 tree basetype, rettype, argtypes;
371 /* Make a node of the sort we want. */
372 t = make_node (METHOD_TYPE);
374 TYPE_METHOD_BASETYPE (t) = TYPE_MAIN_VARIANT (basetype);
375 TREE_TYPE (t) = rettype;
/* Signatures get a special `this' pointer type.  */
376 if (IS_SIGNATURE (basetype))
377 ptype = build_signature_pointer_type (TYPE_MAIN_VARIANT (basetype),
378 TYPE_READONLY (basetype),
379 TYPE_VOLATILE (basetype));
381 ptype = build_pointer_type (basetype);
383 /* The actual arglist for this function includes a "hidden" argument
384 which is "this". Put it into the list of argument types. */
386 argtypes = tree_cons (NULL_TREE, ptype, argtypes);
387 TYPE_ARG_TYPES (t) = argtypes;
388 TREE_SIDE_EFFECTS (argtypes) = 1; /* Mark first argtype as "artificial". */
390 /* If we already have such a type, use the old one and free this one.
391 Note that it also frees up the above cons cell if found. */
392 hashcode = TYPE_HASH (basetype) + TYPE_HASH (rettype) + type_hash_list (argtypes);
393 t = type_hash_canon (hashcode, t);
/* Lay the type out only if the canonical node is still unsized.  */
395 if (TYPE_SIZE (t) == 0)
/* NOTE(review): incomplete excerpt -- parameter declarations, the
   else branch pairing, and the final return of T are missing from
   this listing.  */
/* Build (or reuse) an ARRAY_TYPE of ELT_TYPE indexed by INDEX_TYPE,
   propagating constructor/destructor needs from the element type.  */
402 build_cplus_array_type (elt_type, index_type)
406 register struct obstack *ambient_obstack = current_obstack;
407 register struct obstack *ambient_saveable_obstack = saveable_obstack;
410 /* We need a new one. If both ELT_TYPE and INDEX_TYPE are permanent,
411 make this permanent too. */
412 if (TREE_PERMANENT (elt_type)
413 && (index_type == 0 || TREE_PERMANENT (index_type)))
415 current_obstack = &permanent_obstack;
416 saveable_obstack = &permanent_obstack;
/* Inside a template, build an unlaid-out node by hand rather than
   going through build_array_type.  */
419 if (current_template_parms)
421 t = make_node (ARRAY_TYPE);
422 TREE_TYPE (t) = elt_type;
423 TYPE_DOMAIN (t) = index_type;
426 t = build_array_type (elt_type, index_type);
428 /* Push these needs up so that initialization takes place
430 TYPE_NEEDS_CONSTRUCTING (t) = TYPE_NEEDS_CONSTRUCTING (TYPE_MAIN_VARIANT (elt_type));
431 TYPE_NEEDS_DESTRUCTOR (t) = TYPE_NEEDS_DESTRUCTOR (TYPE_MAIN_VARIANT (elt_type));
/* Restore the allocation obstacks saved on entry.  */
432 current_obstack = ambient_obstack;
433 saveable_obstack = ambient_saveable_obstack;
/* NOTE(review): incomplete excerpt -- the recursive call's CONSTP /
   VOLATILEP arguments and the pop_obstacks pairing are missing from
   this listing.  */
437 /* Make a variant type in the proper way for C/C++, propagating qualifiers
438 down to the element type of an array. */
441 cp_build_type_variant (type, constp, volatilep)
443 int constp, volatilep;
445 if (type == error_mark_node)
/* Arrays: qualify the element type, then rebuild the array on the
   main variant's obstack.  */
448 if (TREE_CODE (type) == ARRAY_TYPE)
450 tree real_main_variant = TYPE_MAIN_VARIANT (type);
452 push_obstacks (TYPE_OBSTACK (real_main_variant),
453 TYPE_OBSTACK (real_main_variant));
454 type = build_cplus_array_type (cp_build_type_variant (TREE_TYPE (type),
458 /* TYPE must be on same obstack as REAL_MAIN_VARIANT. If not,
459 make a copy. (TYPE might have come from the hash table and
460 REAL_MAIN_VARIANT might be in some function's obstack.) */
462 if (TYPE_OBSTACK (type) != TYPE_OBSTACK (real_main_variant))
464 type = copy_node (type);
465 TYPE_POINTER_TO (type) = TYPE_REFERENCE_TO (type) = 0;
468 TYPE_MAIN_VARIANT (type) = real_main_variant;
/* Non-array types go through the language-independent routine.  */
471 return build_type_variant (type, constp, volatilep);
/* NOTE(review): incomplete excerpt -- braces, the virtual-base skip
   path, DELTA's declaration and the loop increment of I are missing
   from this listing.  */
474 /* Add OFFSET to all base types of T.
476 OFFSET, which is a type offset, is number of bytes.
478 Note that we don't have to worry about having two paths to the
479 same base type, since this type owns its association list. */
481 propagate_binfo_offsets (binfo, offset)
485 tree binfos = BINFO_BASETYPES (binfo);
486 int i, n_baselinks = binfos ? TREE_VEC_LENGTH (binfos) : 0;
488 for (i = 0; i < n_baselinks; /* note increment is done in the loop. */)
490 tree base_binfo = TREE_VEC_ELT (binfos, i);
/* Virtual bases are laid out separately; presumably skipped here
   (the branch body is not visible in this listing).  */
492 if (TREE_VIA_VIRTUAL (base_binfo))
497 tree base_binfos = BINFO_BASETYPES (base_binfo);
/* Find the next non-virtual base, to compute the gap to it.  */
500 for (j = i+1; j < n_baselinks; j++)
501 if (! TREE_VIA_VIRTUAL (TREE_VEC_ELT (binfos, j)))
503 /* The next basetype offset must take into account the space
504 between the classes, not just the size of each class. */
505 delta = size_binop (MINUS_EXPR,
506 BINFO_OFFSET (TREE_VEC_ELT (binfos, j)),
507 BINFO_OFFSET (base_binfo));
/* Install OFFSET, avoiding size_binop when either side is zero.  */
512 if (BINFO_OFFSET_ZEROP (base_binfo))
513 BINFO_OFFSET (base_binfo) = offset;
515 BINFO_OFFSET (base_binfo)
516 = size_binop (PLUS_EXPR, BINFO_OFFSET (base_binfo), offset);
518 BINFO_OFFSET (base_binfo) = offset;
523 tree chain = NULL_TREE;
525 /* Now unshare the structure beneath BASE_BINFO. */
526 for (k = TREE_VEC_LENGTH (base_binfos)-1;
529 tree base_base_binfo = TREE_VEC_ELT (base_binfos, k);
530 if (! TREE_VIA_VIRTUAL (base_base_binfo))
531 TREE_VEC_ELT (base_binfos, k)
532 = make_binfo (BINFO_OFFSET (base_base_binfo),
534 BINFO_VTABLE (base_base_binfo),
535 BINFO_VIRTUALS (base_base_binfo),
537 chain = TREE_VEC_ELT (base_binfos, k);
538 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
539 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
540 BINFO_INHERITANCE_CHAIN (chain) = base_binfo;
542 /* Now propagate the offset to the base types. */
543 propagate_binfo_offsets (base_binfo, offset);
546 /* Go to our next class that counts for offset propagation. */
549 offset = size_binop (PLUS_EXPR, offset, delta);
/* NOTE(review): incomplete excerpt -- parameter declarations, the
   #else/#endif of the STRUCTURE_SIZE_BOUNDARY conditional, loop
   braces and the return of MAX are missing from this listing.  */
554 /* Compute the actual offsets that our virtual base classes
555 will have *for this type*. This must be performed after
556 the fields are laid out, since virtual baseclasses must
557 lay down at the end of the record.
559 Returns the maximum number of virtual functions any of the virtual
560 baseclasses provide. */
562 layout_vbasetypes (rec, max)
566 /* Get all the virtual base types that this type uses.
567 The TREE_VALUE slot holds the virtual baseclass type. */
568 tree vbase_types = get_vbase_types (rec);
570 #ifdef STRUCTURE_SIZE_BOUNDARY
571 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
573 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
577 /* Record size so far is CONST_SIZE + VAR_SIZE bits,
578 where CONST_SIZE is an integer
579 and VAR_SIZE is a tree expression.
580 If VAR_SIZE is null, the size is just CONST_SIZE.
581 Naturally we try to avoid using VAR_SIZE. */
582 register unsigned const_size = 0;
583 register tree var_size = 0;
584 int nonvirtual_const_size;
586 CLASSTYPE_VBASECLASSES (rec) = vbase_types;
588 if (TREE_CODE (TYPE_SIZE (rec)) == INTEGER_CST)
589 const_size = TREE_INT_CST_LOW (TYPE_SIZE (rec));
591 var_size = TYPE_SIZE (rec);
/* Remember the size without virtual bases, to compute
   CLASSTYPE_VBASE_SIZE below.  */
593 nonvirtual_const_size = const_size;
/* Walk the virtual-base list, appending each at the end of the
   record (loop header not visible in this listing).  */
597 tree basetype = BINFO_TYPE (vbase_types);
600 desired_align = TYPE_ALIGN (basetype);
601 record_align = MAX (record_align, desired_align);
604 offset = integer_zero_node;
607 /* Give each virtual base type the alignment it wants. */
608 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
609 * TYPE_ALIGN (basetype);
610 offset = size_int (CEIL (const_size, BITS_PER_UNIT));
/* Track the largest vtable among the virtual bases.  */
613 if (CLASSTYPE_VSIZE (basetype) > max)
614 max = CLASSTYPE_VSIZE (basetype);
615 BINFO_OFFSET (vbase_types) = offset;
617 if (TREE_CODE (TYPE_SIZE (basetype)) == INTEGER_CST)
619 /* Every virtual baseclass takes at least a UNIT, so that we can
620 take its address and get something different for each base. */
621 const_size += MAX (BITS_PER_UNIT,
622 TREE_INT_CST_LOW (TYPE_SIZE (basetype))
623 - TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype)));
625 else if (var_size == 0)
626 var_size = TYPE_SIZE (basetype);
628 var_size = size_binop (PLUS_EXPR, var_size, TYPE_SIZE (basetype));
630 vbase_types = TREE_CHAIN (vbase_types);
635 /* Because a virtual base might take a single byte above,
636 we have to re-adjust the total size to make sure it is
637 a multiple of the alignment. */
638 /* Give the whole object the alignment it wants. */
639 const_size = CEIL (const_size, record_align) * record_align;
642 /* Set the alignment in the complete type. We don't set CLASSTYPE_ALIGN
643 here, as that is for this class, without any virtual base classes. */
644 TYPE_ALIGN (rec) = record_align;
645 if (const_size != nonvirtual_const_size)
647 CLASSTYPE_VBASE_SIZE (rec)
648 = size_int (const_size - nonvirtual_const_size);
649 TYPE_SIZE (rec) = size_int (const_size);
652 /* Now propagate offset information throughout the lattice
653 under the vbase type. */
654 for (vbase_types = CLASSTYPE_VBASECLASSES (rec); vbase_types;
655 vbase_types = TREE_CHAIN (vbase_types))
657 tree base_binfos = BINFO_BASETYPES (vbase_types);
659 BINFO_INHERITANCE_CHAIN (vbase_types) = TYPE_BINFO (rec);
663 tree chain = NULL_TREE;
665 /* Now unshare the structure beneath BASE_BINFO. */
667 for (j = TREE_VEC_LENGTH (base_binfos)-1;
670 tree base_base_binfo = TREE_VEC_ELT (base_binfos, j);
671 if (! TREE_VIA_VIRTUAL (base_base_binfo))
672 TREE_VEC_ELT (base_binfos, j)
673 = make_binfo (BINFO_OFFSET (base_base_binfo),
675 BINFO_VTABLE (base_base_binfo),
676 BINFO_VIRTUALS (base_base_binfo),
678 chain = TREE_VEC_ELT (base_binfos, j);
679 TREE_VIA_PUBLIC (chain) = TREE_VIA_PUBLIC (base_base_binfo);
680 TREE_VIA_PROTECTED (chain) = TREE_VIA_PROTECTED (base_base_binfo);
681 BINFO_INHERITANCE_CHAIN (chain) = vbase_types;
684 propagate_binfo_offsets (vbase_types, BINFO_OFFSET (vbase_types));
/* NOTE(review): incomplete excerpt -- parameter declarations, braces,
   the continue/else structure inside the loop, and the return of
   VBASE_DECLS are missing from this listing.  */
691 /* Lay out the base types of a record type, REC.
692 Tentatively set the size and alignment of REC
693 according to the base types alone.
695 Offsets for immediate nonvirtual baseclasses are also computed here.
697 TYPE_BINFO (REC) should be NULL_TREE on entry, and this routine
698 creates a list of base_binfos in TYPE_BINFO (REC) from BINFOS.
700 Returns list of virtual base classes in a FIELD_DECL chain. */
702 layout_basetypes (rec, binfos)
705 /* Chain to hold all the new FIELD_DECLs which point at virtual
707 tree vbase_decls = NULL_TREE;
709 #ifdef STRUCTURE_SIZE_BOUNDARY
710 unsigned record_align = MAX (STRUCTURE_SIZE_BOUNDARY, TYPE_ALIGN (rec));
712 unsigned record_align = MAX (BITS_PER_UNIT, TYPE_ALIGN (rec));
715 /* Record size so far is CONST_SIZE + VAR_SIZE bits, where CONST_SIZE is
716 an integer and VAR_SIZE is a tree expression. If VAR_SIZE is null,
717 the size is just CONST_SIZE. Naturally we try to avoid using
718 VAR_SIZE. And so far, we've been successful. */
720 register tree var_size = 0;
723 register unsigned const_size = 0;
724 int i, n_baseclasses = binfos ? TREE_VEC_LENGTH (binfos) : 0;
726 /* Handle basetypes almost like fields, but record their
727 offsets differently. */
729 for (i = 0; i < n_baseclasses; i++)
731 int inc, desired_align, int_vbase_size;
732 register tree base_binfo = TREE_VEC_ELT (binfos, i);
733 register tree basetype = BINFO_TYPE (base_binfo);
/* Incomplete base classes were already diagnosed; neutralize the
   binfo so later code sees something sane.  */
736 if (TYPE_SIZE (basetype) == 0)
739 /* This error is now reported in xref_tag, thus giving better
740 location information. */
741 error_with_aggr_type (base_binfo,
742 "base class `%s' has incomplete type");
744 TREE_VIA_PUBLIC (base_binfo) = 1;
745 TREE_VIA_PROTECTED (base_binfo) = 0;
746 TREE_VIA_VIRTUAL (base_binfo) = 0;
748 /* Should handle this better so that
751 class B: private A { virtual void F(); };
753 does not dump core when compiled. */
754 my_friendly_abort (121);
759 /* All basetypes are recorded in the association list of the
762 if (TREE_VIA_VIRTUAL (base_binfo))
765 char *name = (char *)alloca (TYPE_NAME_LENGTH (basetype)
766 + sizeof (VBASE_NAME) + 1);
768 /* The offset for a virtual base class is only used in computing
769 virtual function tables and for initializing virtual base
770 pointers. It is built once `get_vbase_types' is called. */
772 /* If this basetype can come from another vbase pointer
773 without an additional indirection, we will share
774 that pointer. If an indirection is involved, we
775 make our own pointer. */
776 for (j = 0; j < n_baseclasses; j++)
778 tree other_base_binfo = TREE_VEC_ELT (binfos, j);
779 if (! TREE_VIA_VIRTUAL (other_base_binfo)
780 && binfo_member (basetype,
781 CLASSTYPE_VBASECLASSES (BINFO_TYPE (other_base_binfo))))
/* Build the hidden FIELD_DECL holding this vbase pointer.  */
784 sprintf (name, VBASE_NAME_FORMAT, TYPE_NAME_STRING (basetype));
785 decl = build_lang_field_decl (FIELD_DECL, get_identifier (name),
786 build_pointer_type (basetype));
787 /* If you change any of the below, take a look at all the
788 other VFIELD_BASEs and VTABLE_BASEs in the code, and change
790 DECL_ASSEMBLER_NAME (decl) = get_identifier (VTABLE_BASE);
791 DECL_VIRTUAL_P (decl) = 1;
792 DECL_FIELD_CONTEXT (decl) = rec;
793 DECL_CLASS_CONTEXT (decl) = rec;
794 DECL_FCONTEXT (decl) = basetype;
795 DECL_SAVED_INSNS (decl) = NULL_RTX;
796 DECL_FIELD_SIZE (decl) = 0;
797 DECL_ALIGN (decl) = TYPE_ALIGN (ptr_type_node);
798 TREE_CHAIN (decl) = vbase_decls;
799 BINFO_VPTR_FIELD (base_binfo) = decl;
/* Warn about a non-virtual destructor in a virtual base
   (presumably gated by #if 0 elsewhere -- see line 826 below).  */
802 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
803 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1)) == NULL_TREE)
805 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1),
806 "destructor `%s' non-virtual");
807 warning ("in inheritance relationship `%s: virtual %s'",
808 TYPE_NAME_STRING (rec),
809 TYPE_NAME_STRING (basetype));
812 /* The space this decl occupies has already been accounted for. */
817 offset = integer_zero_node;
820 /* Give each base type the alignment it wants. */
821 const_size = CEIL (const_size, TYPE_ALIGN (basetype))
822 * TYPE_ALIGN (basetype);
823 offset = size_int ((const_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
826 /* bpk: Disabled this check until someone is willing to
827 claim it as theirs and explain exactly what circumstances
828 warrant the warning. */
829 if (warn_nonvdtor && TYPE_HAS_DESTRUCTOR (basetype)
830 && DECL_VINDEX (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1)) == NULL_TREE)
832 warning_with_decl (TREE_VEC_ELT (CLASSTYPE_METHOD_VEC (basetype), 1),
833 "destructor `%s' non-virtual");
834 warning ("in inheritance relationship `%s:%s %s'",
835 TYPE_NAME_STRING (rec),
836 TREE_VIA_VIRTUAL (base_binfo) ? " virtual" : "",
837 TYPE_NAME_STRING (basetype));
841 BINFO_OFFSET (base_binfo) = offset;
/* Inherit the vtable and virtuals if the base has any.  */
842 if (CLASSTYPE_VSIZE (basetype))
844 BINFO_VTABLE (base_binfo) = TYPE_BINFO_VTABLE (basetype);
845 BINFO_VIRTUALS (base_binfo) = TYPE_BINFO_VIRTUALS (basetype);
847 TREE_CHAIN (base_binfo) = TYPE_BINFO (rec);
848 TYPE_BINFO (rec) = base_binfo;
850 /* Add only the amount of storage not present in
851 the virtual baseclasses. */
853 int_vbase_size = TREE_INT_CST_LOW (CLASSTYPE_VBASE_SIZE (basetype));
854 if (TREE_INT_CST_LOW (TYPE_SIZE (basetype)) > int_vbase_size)
856 inc = MAX (record_align,
857 (TREE_INT_CST_LOW (TYPE_SIZE (basetype))
860 /* Record must have at least as much alignment as any field. */
861 desired_align = TYPE_ALIGN (basetype);
862 record_align = MAX (record_align, desired_align);
869 CLASSTYPE_SIZE (rec) = size_int (const_size);
871 CLASSTYPE_SIZE (rec) = integer_zero_node;
872 CLASSTYPE_ALIGN (rec) = record_align;
877 /* Hashing of lists so that we don't make duplicates.
878 The entry point is `list_hash_canon'. */
880 /* Each hash table slot is a bucket containing a chain
881 of these structures. */
/* NOTE(review): the `struct list_hash {' opener is missing from this
   listing; these are its members.  */
885 struct list_hash *next; /* Next structure in the bucket. */
886 int hashcode; /* Hash code of this list. */
887 tree list; /* The list recorded here. */
890 /* Now here is the hash table. When recording a list, it is added
891 to the slot whose index is the hash code mod the table size.
892 Note that the hash table is used for several kinds of lists.
893 While all these live in the same table, they are completely independent,
894 and the hash code is computed differently for each of these. */
896 #define TYPE_HASH_SIZE 59
897 struct list_hash *list_hash_table[TYPE_HASH_SIZE];
899 /* Compute a hash code for a list (chain of TREE_LIST nodes
900 with goodies in the TREE_PURPOSE, TREE_VALUE, and bits of the
901 TREE_COMMON slots), by adding the hash codes of the individual entries. */
/* NOTE(review): the function header (presumably `list_hash (list)')
   and the final return of HASHCODE are missing from this listing.  */
907 register int hashcode = 0;
909 if (TREE_CHAIN (list))
910 hashcode += TYPE_HASH (TREE_CHAIN (list));
912 if (TREE_VALUE (list))
913 hashcode += TYPE_HASH (TREE_VALUE (list));
916 if (TREE_PURPOSE (list))
917 hashcode += TYPE_HASH (TREE_PURPOSE (list));
/* NOTE(review): incomplete excerpt covering list_hash_lookup,
   list_hash_add and list_hash_canon -- parameter declarations,
   braces and several returns are missing from this listing.  */
923 /* Look in the type hash table for a type isomorphic to TYPE.
924 If one is found, return it. Otherwise return 0. */
927 list_hash_lookup (hashcode, list)
931 register struct list_hash *h;
/* Scan the bucket; equality requires the same hash, the same
   via-flags, and identical PURPOSE/VALUE/CHAIN pointers.  */
932 for (h = list_hash_table[hashcode % TYPE_HASH_SIZE]; h; h = h->next)
933 if (h->hashcode == hashcode
934 && TREE_VIA_VIRTUAL (h->list) == TREE_VIA_VIRTUAL (list)
935 && TREE_VIA_PUBLIC (h->list) == TREE_VIA_PUBLIC (list)
936 && TREE_VIA_PROTECTED (h->list) == TREE_VIA_PROTECTED (list)
937 && TREE_PURPOSE (h->list) == TREE_PURPOSE (list)
938 && TREE_VALUE (h->list) == TREE_VALUE (list)
939 && TREE_CHAIN (h->list) == TREE_CHAIN (list))
941 my_friendly_assert (TREE_TYPE (h->list) == TREE_TYPE (list), 299);
947 /* Add an entry to the list-hash-table
948 for a list TYPE whose hash code is HASHCODE. */
951 list_hash_add (hashcode, list)
955 register struct list_hash *h;
/* Entries live on the class obstack, matching hash_tree_cons.  */
957 h = (struct list_hash *) obstack_alloc (&class_obstack, sizeof (struct list_hash));
958 h->hashcode = hashcode;
/* Push onto the front of the bucket chain.  */
960 h->next = list_hash_table[hashcode % TYPE_HASH_SIZE];
961 list_hash_table[hashcode % TYPE_HASH_SIZE] = h;
964 /* Given TYPE, and HASHCODE its hash code, return the canonical
965 object for an identical list if one already exists.
966 Otherwise, return TYPE, and record it as the canonical object
967 if it is a permanent object.
969 To use this function, first create a list of the sort you want.
970 Then compute its hash code from the fields of the list that
971 make it different from other similar lists.
972 Then call this function and use the value.
973 This function frees the list you pass in if it is a duplicate. */
975 /* Set to 1 to debug without canonicalization. Never set by program. */
976 static int debug_no_list_hash = 0;
979 list_hash_canon (hashcode, list)
985 if (debug_no_list_hash)
988 t1 = list_hash_lookup (hashcode, list);
/* Duplicate found: free the freshly built list and return T1
   (the return is not visible in this listing).  */
991 obstack_free (&class_obstack, list);
995 /* If this is a new list, record it for later reuse. */
996 list_hash_add (hashcode, list);
/* NOTE(review): incomplete excerpt covering hash_tree_cons,
   hash_tree_chain and hash_chainon -- return types, some locals and
   the final returns of T are missing from this listing.  */
/* Build a canonicalized TREE_LIST node carrying the via-* access
   bits, allocating on the class obstack.  */
1002 hash_tree_cons (via_public, via_virtual, via_protected, purpose, value, chain)
1003 int via_public, via_virtual, via_protected;
1004 tree purpose, value, chain;
1006 struct obstack *ambient_obstack = current_obstack;
1010 current_obstack = &class_obstack;
1011 t = tree_cons (purpose, value, chain);
1012 TREE_VIA_PUBLIC (t) = via_public;
1013 TREE_VIA_PROTECTED (t) = via_protected;
1014 TREE_VIA_VIRTUAL (t) = via_virtual;
1015 hashcode = list_hash (t);
1016 t = list_hash_canon (hashcode, t);
1017 current_obstack = ambient_obstack;
1021 /* Constructor for hashed lists. */
1023 hash_tree_chain (value, chain)
1026 struct obstack *ambient_obstack = current_obstack;
1030 current_obstack = &class_obstack;
1031 t = tree_cons (NULL_TREE, value, chain);
1032 hashcode = list_hash (t);
1033 t = list_hash_canon (hashcode, t);
1034 current_obstack = ambient_obstack;
1038 /* Similar, but used for concatenating two lists. */
1040 hash_chainon (list1, list2)
/* Recursively re-cons LIST1 onto LIST2 so the result is hashed.  */
1047 if (TREE_CHAIN (list1) == NULL_TREE)
1048 return hash_tree_chain (TREE_VALUE (list1), list2);
1049 return hash_tree_chain (TREE_VALUE (list1),
1050 hash_chainon (TREE_CHAIN (list1), list2));
/* NOTE(review): incomplete excerpt covering get_identifier_list and
   get_decl_list -- return types, several branch bodies and the
   return of LIST are missing from this listing.  */
/* Return a TREE_LIST representing identifier VALUE, reusing the
   class's cached CLASSTYPE_ID_AS_LIST when possible.  */
1054 get_identifier_list (value)
1057 tree list = IDENTIFIER_AS_LIST (value);
1058 if (list != NULL_TREE
1059 && (TREE_CODE (list) != TREE_LIST
1060 || TREE_VALUE (list) != value))
/* Only a main-variant RECORD_TYPE gets the shared cached list.  */
1062 else if (IDENTIFIER_HAS_TYPE_VALUE (value)
1063 && TREE_CODE (IDENTIFIER_TYPE_VALUE (value)) == RECORD_TYPE
1064 && IDENTIFIER_TYPE_VALUE (value)
1065 == TYPE_MAIN_VARIANT (IDENTIFIER_TYPE_VALUE (value)))
1067 tree type = IDENTIFIER_TYPE_VALUE (value);
1069 if (TYPE_PTRMEMFUNC_P (type))
1071 else if (type == current_class_type)
1072 /* Don't mess up the constructor name. */
1073 list = tree_cons (NULL_TREE, value, NULL_TREE);
1076 if (! CLASSTYPE_ID_AS_LIST (type))
1077 CLASSTYPE_ID_AS_LIST (type)
1078 = perm_tree_cons (NULL_TREE, TYPE_IDENTIFIER (type), NULL_TREE);
1079 list = CLASSTYPE_ID_AS_LIST (type);
/* As above, but VALUE may be an identifier or a type.  */
1086 get_decl_list (value)
1089 tree list = NULL_TREE;
1091 if (TREE_CODE (value) == IDENTIFIER_NODE)
1092 list = get_identifier_list (value);
1093 else if (TREE_CODE (value) == RECORD_TYPE
1094 && TYPE_LANG_SPECIFIC (value)
1095 && value == TYPE_MAIN_VARIANT (value))
1096 list = CLASSTYPE_AS_LIST (value);
/* Cached lists must be singletons.  */
1098 if (list != NULL_TREE)
1100 my_friendly_assert (TREE_CHAIN (list) == NULL_TREE, 301);
1104 return build_decl_list (NULL_TREE, value);
/* NOTE(review): incomplete excerpt -- some parameter declarations,
   the TYPE local, else branch and the return of NEW_BINFO are
   missing from this listing.  */
1107 /* Build an association between TYPE and some parameters:
1109 OFFSET is the offset added to `this' to convert it to a pointer
1112 BINFO is the base binfo to use, if we are deriving from one. This
1113 is necessary, as we want specialized parent binfos from base
1114 classes, so that the VTABLE_NAMEs of bases are for the most derived
1115 type, instead of the simple type.
1117 VTABLE is the virtual function table with which to initialize
1118 sub-objects of type TYPE.
1120 VIRTUALS are the virtual functions sitting in VTABLE.
1122 CHAIN are more associations we must retain. */
1125 make_binfo (offset, binfo, vtable, virtuals, chain)
1127 tree vtable, virtuals;
/* A binfo is a 6-element TREE_VEC.  */
1130 tree new_binfo = make_tree_vec (6);
/* BINFO may be either an existing binfo or a type node.  */
1133 if (TREE_CODE (binfo) == TREE_VEC)
1134 type = BINFO_TYPE (binfo);
1138 binfo = TYPE_BINFO (binfo);
1141 TREE_CHAIN (new_binfo) = chain;
1143 TREE_USED (new_binfo) = TREE_USED (chain);
1145 TREE_TYPE (new_binfo) = TYPE_MAIN_VARIANT (type);
1146 BINFO_OFFSET (new_binfo) = offset;
1147 BINFO_VTABLE (new_binfo) = vtable;
1148 BINFO_VIRTUALS (new_binfo) = virtuals;
1149 BINFO_VPTR_FIELD (new_binfo) = NULL_TREE;
/* Share children lazily via a shallow copy of the basetype vec.  */
1151 if (binfo && BINFO_BASETYPES (binfo) != NULL_TREE)
1152 BINFO_BASETYPES (new_binfo) = copy_node (BINFO_BASETYPES (binfo));
/* NOTE(review): incomplete excerpt covering binfo_value, a
   path-reversal helper (name not visible) and a binfo debug-dump
   routine -- headers, braces and returns are missing from this
   listing.  */
1156 /* Return the binfo value for ELEM in TYPE. */
1159 binfo_value (elem, type)
1163 if (get_base_distance (elem, type, 0, (tree *)0) == -2)
1164 compiler_error ("base class `%s' ambiguous in binfo_value",
1165 TYPE_NAME_STRING (elem));
1167 return TYPE_BINFO (type);
1168 if (TREE_CODE (elem) == RECORD_TYPE && TYPE_BINFO (elem) == type)
1170 return get_binfo (elem, type, 0);
/* Classic in-place list reversal over BINFO_INHERITANCE_CHAIN
   links; presumably this is a reverse_path-style helper.  */
1177 register tree prev = 0, tmp, next;
1178 for (tmp = path; tmp; tmp = next)
1180 next = BINFO_INHERITANCE_CHAIN (tmp);
1181 BINFO_INHERITANCE_CHAIN (tmp) = prev;
/* Debug dump of a binfo (ELEM) to stderr: offset, vtable decl,
   and the virtual-function slots.  */
1191 unsigned HOST_WIDE_INT n;
1194 fprintf (stderr, "type \"%s\"; offset = %d\n",
1195 TYPE_NAME_STRING (BINFO_TYPE (elem)),
1196 TREE_INT_CST_LOW (BINFO_OFFSET (elem)));
1197 fprintf (stderr, "vtable type:\n");
1198 debug_tree (BINFO_TYPE (elem));
1199 if (BINFO_VTABLE (elem))
1200 fprintf (stderr, "vtable decl \"%s\"\n", IDENTIFIER_POINTER (DECL_NAME (BINFO_VTABLE (elem))));
1202 fprintf (stderr, "no vtable decl yet\n");
1203 fprintf (stderr, "virtuals:\n");
1204 virtuals = BINFO_VIRTUALS (elem);
1206 n = skip_rtti_stuff (&virtuals);
1210 tree fndecl = TREE_OPERAND (FNADDR_FROM_VTABLE_ENTRY (TREE_VALUE (virtuals)), 0);
1211 fprintf (stderr, "%s [%d =? %d]\n",
1212 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (fndecl)),
1213 n, TREE_INT_CST_LOW (DECL_VINDEX (fndecl)));
1215 virtuals = TREE_CHAIN (virtuals);
/* NOTE(review): incomplete excerpt covering several small helpers:
   decl_list_length, a counting wrapper, is_overloaded_fn,
   really_overloaded_fn, a get-first-function helper,
   fnaddr_from_vtable_entry, function_arg_chain,
   promotes_to_aggr_type and is_aggr_type_2.  Headers, braces and
   many returns are missing from this listing.  */
1219 /* Return the length of a chain of nodes chained through DECL_CHAIN.
1220 We expect a null pointer to mark the end of the chain.
1221 This is the Lisp primitive `length'. */
1224 decl_list_length (t)
1228 register int len = 0;
/* Only function/template decl chains are valid here.  */
1230 my_friendly_assert (TREE_CODE (t) == FUNCTION_DECL
1231 || TREE_CODE (t) == TEMPLATE_DECL, 300);
1232 for (tail = t; tail; tail = DECL_CHAIN (tail))
/* Presumably count_functions: dispatch on T's code.  */
1242 if (TREE_CODE (t) == FUNCTION_DECL)
1244 else if (TREE_CODE (t) == TREE_LIST)
1245 return decl_list_length (TREE_VALUE (t));
1247 my_friendly_abort (359);
/* Nonzero if X denotes one or more functions.  */
1252 is_overloaded_fn (x)
1255 if (TREE_CODE (x) == FUNCTION_DECL)
1258 if (TREE_CODE (x) == TREE_LIST
1259 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1260 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
/* Nonzero if X denotes more than one function (a TREE_LIST of
   function/template decls).  */
1267 really_overloaded_fn (x)
1270 if (TREE_CODE (x) == TREE_LIST
1271 && (TREE_CODE (TREE_VALUE (x)) == FUNCTION_DECL
1272 || TREE_CODE (TREE_VALUE (x)) == TEMPLATE_DECL))
/* Presumably get_first_fn: pull the first decl out of FROM.  */
1282 if (TREE_CODE (from) == FUNCTION_DECL)
1285 my_friendly_assert (TREE_CODE (from) == TREE_LIST, 9);
1287 return TREE_VALUE (from);
/* Extract the function address from a vtable ENTRY, looking through
   thunks when -fvtable-thunks is in effect.  */
1291 fnaddr_from_vtable_entry (entry)
1294 if (flag_vtable_thunks)
1297 if (TREE_CODE (func) == ADDR_EXPR)
1298 func = TREE_OPERAND (func, 0);
1299 if (TREE_CODE (func) == THUNK_DECL)
1300 return DECL_INITIAL (func);
/* Non-thunk layout: the address is the third constructor element.  */
1305 return TREE_VALUE (TREE_CHAIN (TREE_CHAIN (CONSTRUCTOR_ELTS (entry))));
/* Return T's argument types minus the implicit `this'.  */
1309 function_arg_chain (t)
1312 return TREE_CHAIN (TYPE_ARG_TYPES (TREE_TYPE (t)));
/* Nonzero if T is an aggregate whose code is CODE.  */
1316 promotes_to_aggr_type (t, code)
1318 enum tree_code code;
1320 if (TREE_CODE (t) == code)
1322 return IS_AGGR_TYPE (t);
/* Nonzero if both T1 and T2 are aggregates of the same code.  */
1326 is_aggr_type_2 (t1, t2)
1329 if (TREE_CODE (t1) != TREE_CODE (t2))
1331 return IS_AGGR_TYPE (t1) && IS_AGGR_TYPE (t2);
/* NOTE(review): incomplete excerpt -- parameter declarations for PFN
   and S are missing from this listing.  */
1334 /* Give message using types TYPE1 and TYPE2 as arguments.
1335 PFN is the function which will print the message;
1336 S is the format string for PFN to use. */
1338 message_2_types (pfn, s, type1, type2)
1343 tree name1 = TYPE_NAME (type1);
1344 tree name2 = TYPE_NAME (type2);
/* TYPE_NAME may be a TYPE_DECL; strip it to the identifier.  */
1345 if (TREE_CODE (name1) == TYPE_DECL)
1346 name1 = DECL_NAME (name1);
1347 if (TREE_CODE (name2) == TYPE_DECL)
1348 name2 = DECL_NAME (name2);
1349 (*pfn) (s, IDENTIFIER_POINTER (name1), IDENTIFIER_POINTER (name2));
/* Size of the small LRU-ish cache of printable names below.  */
1352 #define PRINT_RING_SIZE 4
/* Return a printable name for DECL.  Function decls with language-
   specific data get their decl_as_string result cached in a small
   ring of malloc'd strings (decl_ring/print_ring) so repeated error
   messages about the same function do not re-render it.  */
1355 lang_printable_name (decl)
1358 static tree decl_ring[PRINT_RING_SIZE];
1359 static char *print_ring[PRINT_RING_SIZE];
1360 static int ring_counter;
1363 /* Only cache functions. */
1364 if (TREE_CODE (decl) != FUNCTION_DECL
1365 || DECL_LANG_SPECIFIC (decl) == 0)
1366 return decl_as_string (decl, 1);
1368 /* See if this print name is lying around. */
1369 for (i = 0; i < PRINT_RING_SIZE; i++)
1370 if (decl_ring[i] == decl)
1371 /* yes, so return it. */
1372 return print_ring[i];
/* Advance the ring pointer, wrapping (wrap statement elided), but
   never evict the slot holding current_function_decl; if no other
   slot is found something is wrong -- abort with code 106.  */
1374 if (++ring_counter == PRINT_RING_SIZE)
1377 if (current_function_decl != NULL_TREE)
1379 if (decl_ring[ring_counter] == current_function_decl)
1381 if (ring_counter == PRINT_RING_SIZE)
1383 if (decl_ring[ring_counter] == current_function_decl)
1384 my_friendly_abort (106);
/* Free the previous occupant's string before reusing the slot.  */
1387 if (print_ring[ring_counter])
1388 free (print_ring[ring_counter]);
/* Constructors and destructors are printed without a return type.  */
1391 int print_ret_type_p
1392 = (!DECL_CONSTRUCTOR_P (decl)
1393 && !DESTRUCTOR_NAME_P (DECL_ASSEMBLER_NAME (decl)));
1395 char *name = (char *)decl_as_string (decl, print_ret_type_p);
/* NOTE(review): malloc result is used unchecked -- OOM would crash.  */
1396 print_ring[ring_counter] = (char *)malloc (strlen (name) + 1);
1397 strcpy (print_ring[ring_counter], name);
1398 decl_ring[ring_counter] = decl;
1400 return print_ring[ring_counter];
1403 /* Build the FUNCTION_TYPE or METHOD_TYPE which may throw exceptions
1404 listed in RAISES. */
/* Search TYPE's existing variants for one with matching const/volatile
   qualifiers and an identical exception list; reuse it if found
   (return elided), otherwise copy TYPE and attach RAISES.  */
1406 build_exception_variant (type, raises)
1410 tree v = TYPE_MAIN_VARIANT (type);
1411 int constp = TYPE_READONLY (type);
1412 int volatilep = TYPE_VOLATILE (type);
1414 for (; v; v = TYPE_NEXT_VARIANT (v))
/* Skip variants whose qualifiers differ (continue elided).  */
1416 if (TYPE_READONLY (v) != constp
1417 || TYPE_VOLATILE (v) != volatilep)
1420 /* @@ This should do set equality, not exact match. */
1421 if (simple_cst_list_equal (TYPE_RAISES_EXCEPTIONS (v), raises))
1422 /* List of exceptions raised matches previously found list.
1424 @@ Nice to free up storage used in consing up the
1425 @@ list of exceptions raised. */
1429 /* Need to build a new variant. */
1430 v = build_type_copy (type)
/* The variant lives as long as the type: if RAISES is on a temporary
   obstack, copy it onto the permanent one first.  */
1432 if (raises && ! TREE_PERMANENT (raises))
1434 push_obstacks_nochange ();
1435 end_temporary_allocation ();
1436 raises = copy_list (raises);
1440 TYPE_RAISES_EXCEPTIONS (v) = raises;
1444 /* Subroutine of copy_to_permanent
1446 Assuming T is a node build bottom-up, make it all exist on
1447 permanent obstack, if it is not permanent already. */
/* NOTE(review): this is the tree-walking workhorse (`mapcar'); its
   header and several statements are elided from this view.  FUNC is
   tried first on each node -- a non-null result replaces the node
   outright; otherwise recursion proceeds by TREE_CODE.  The `if'
   below deliberately assigns inside a comma expression.  */
1459 if (tmp = func (t), tmp != NULL_TREE)
1462 switch (TREE_CODE (t))
/* ERROR_MARK propagates unchanged.  */
1465 return error_mark_node;
/* Decl nodes: map chain, type, initializer and size in place.  */
1474 tree chain = TREE_CHAIN (t);
1476 TREE_CHAIN (t) = mapcar (chain, func);
1477 TREE_TYPE (t) = mapcar (TREE_TYPE (t), func);
1478 DECL_INITIAL (t) = mapcar (DECL_INITIAL (t), func);
1479 DECL_SIZE (t) = mapcar (DECL_SIZE (t), func);
/* TREE_LIST: map purpose, value and the rest of the chain.  */
1485 tree chain = TREE_CHAIN (t);
1487 TREE_PURPOSE (t) = mapcar (TREE_PURPOSE (t), func);
1488 TREE_VALUE (t) = mapcar (TREE_VALUE (t), func);
1489 TREE_CHAIN (t) = mapcar (chain, func);
/* TREE_VEC: map each element.  NOTE(review): the loop header that
   decrements LEN is elided -- LEN here is the running index.  */
1495 int len = TREE_VEC_LENGTH (t);
1499 TREE_VEC_ELT (t, len) = mapcar (TREE_VEC_ELT (t, len), func);
/* Leaf constants and the like are simply copied.  */
1506 return copy_node (t);
/* Ternary expressions: map all three operands.  */
1512 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1513 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
1514 TREE_OPERAND (t, 2) = mapcar (TREE_OPERAND (t, 2), func);
/* Single-operand case for this code group.  */
1519 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
/* Binary expression codes (list partially elided) fall through to
   the two-operand mapping below.  */
1526 case TRUNC_DIV_EXPR:
1527 case TRUNC_MOD_EXPR:
1535 case BIT_ANDTC_EXPR:
1536 case TRUTH_ANDIF_EXPR:
1537 case TRUTH_ORIF_EXPR:
1545 case FLOOR_DIV_EXPR:
1546 case ROUND_DIV_EXPR:
1548 case FLOOR_MOD_EXPR:
1549 case ROUND_MOD_EXPR:
1551 case PREDECREMENT_EXPR:
1552 case PREINCREMENT_EXPR:
1553 case POSTDECREMENT_EXPR:
1554 case POSTINCREMENT_EXPR:
1559 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
1560 TREE_OPERAND (t, 1) = mapcar (TREE_OPERAND (t, 1), func);
/* Unary expression codes: one operand.  */
1568 case TRUTH_NOT_EXPR:
1572 TREE_OPERAND (t, 0) = mapcar (TREE_OPERAND (t, 0), func);
/* Type nodes are rebuilt from mapped components, re-applying the
   original const/volatile qualifiers each time.  */
1576 tmp = build_pointer_type (mapcar (TREE_TYPE (t), func));
1577 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1578 case REFERENCE_TYPE:
1579 tmp = build_reference_type (mapcar (TREE_TYPE (t), func));
1580 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1582 tmp = build_function_type (mapcar (TREE_TYPE (t), func),
1583 mapcar (TYPE_ARG_TYPES (t), func));
1584 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1586 tmp = build_array_type (mapcar (TREE_TYPE (t), func),
1587 mapcar (TYPE_DOMAIN (t), func));
1588 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1590 tmp = build_index_type (mapcar (TYPE_MAX_VALUE (t), func));
1591 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
1593 tmp = build_offset_type (mapcar (TYPE_OFFSET_BASETYPE (t), func),
1594 mapcar (TREE_TYPE (t), func));
1595 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
/* METHOD_TYPE: basetype is the type of the implicit first argument.  */
1597 tmp = build_cplus_method_type
1598 (mapcar (TREE_TYPE (TREE_VALUE (TYPE_ARG_TYPES (t))), func),
1599 mapcar (TREE_TYPE (t), func),
1600 mapcar (TREE_CHAIN (TYPE_ARG_TYPES (t)), func));
1601 return cp_build_type_variant (tmp, TYPE_READONLY (t), TYPE_VOLATILE (t));
/* RECORD_TYPE that is really a pointer-to-member-function wrapper.  */
1604 if (TYPE_PTRMEMFUNC_P (t))
1605 return build_ptrmemfunc_type
1606 (mapcar (TYPE_PTRMEMFUNC_FN_TYPE (t), func));
1607 /* else fall through */
1609 /* This list is incomplete, but should suffice for now.
1610 It is very important that `sorry' not call
1611 `report_error_function'. That could cause an infinite loop. */
1613 sorry ("initializer contains unrecognized tree code");
1614 return error_mark_node;
/* Unreachable: every switch arm returns or breaks above.  */
1617 my_friendly_abort (107);
/* NOTE(review): header elided; this is the mapcar callback used by
   copy_to_permanent below.  Already-permanent nodes pass through
   (return elided); public var/function decls get a fresh copy.  */
1626 if (TREE_PERMANENT (t))
1628 /* Support `void f () { extern int i; A<&i> a; }' */
1629 if ((TREE_CODE (t) == VAR_DECL || TREE_CODE (t) == FUNCTION_DECL)
1631 return copy_node (t);
1635 /* Assuming T is a node built bottom-up, make it all exist on
1636 permanent obstack, if it is not permanent already. */
/* Saves the ambient obstacks, redirects all allocation to the
   permanent obstack, walks T with mapcar/perm_manip, then restores
   the previous allocation state.  Null or already-permanent trees are
   returned untouched (return elided).  */
1638 copy_to_permanent (t)
1641 register struct obstack *ambient_obstack = current_obstack;
1642 register struct obstack *ambient_saveable_obstack = saveable_obstack;
1645 if (t == NULL_TREE || TREE_PERMANENT (t))
/* Redirect allocation; also suspend momentary allocation so nothing
   of the copy lands on a short-lived obstack.  */
1648 saveable_obstack = &permanent_obstack;
1649 current_obstack = saveable_obstack;
1650 resume = suspend_momentary ();
1652 t = mapcar (t, perm_manip);
/* Restore the caller's allocation context.  */
1654 resume_momentary (resume);
1655 current_obstack = ambient_obstack;
1656 saveable_obstack = ambient_saveable_obstack;
1661 #ifdef GATHER_STATISTICS
1662 extern int depth_reached;
/* Dump C++-front-end memory and search statistics to stderr; the
   template-depth line is compiled only under GATHER_STATISTICS.  */
1666 print_lang_statistics ()
1668 extern struct obstack maybepermanent_obstack, decl_obstack;
1669 print_obstack_statistics ("class_obstack", &class_obstack);
1670 print_obstack_statistics ("decl_obstack", &decl_obstack);
1671 print_obstack_statistics ("permanent_obstack", &permanent_obstack);
1672 print_obstack_statistics ("maybepermanent_obstack", &maybepermanent_obstack);
1673 print_search_statistics ();
1674 print_class_statistics ();
1675 #ifdef GATHER_STATISTICS
1676 fprintf (stderr, "maximum template instantiation depth reached: %d\n",
1681 /* This is used by the `assert' macro. It is provided in libgcc.a,
1682 which `cc' doesn't know how to link. Note that the C++ front-end
1683 no longer actually uses the `assert' macro (instead, it calls
1684 my_friendly_assert). But all of the back-end files still need this. */
/* STRING is the format; EXPRESSION, LINE and FILENAME fill it in.
   NOTE(review): abort() call after the fprintf is elided here.  */
1686 __eprintf (string, expression, line, filename)
1689 const char *expression;
1691 const char *filename;
1699 fprintf (stderr, string, expression, line, filename);
1704 /* Return, as an INTEGER_CST node, the number of elements for
1705 TYPE (which is an ARRAY_TYPE). This counts only elements of the top array. */
/* array_type_nelts yields max-index; the (elided) third build operand
   adds one to convert it to a count.  */
1708 array_type_nelts_top (type)
1711 return fold (build (PLUS_EXPR, sizetype,
1712 array_type_nelts (type),
1716 /* Return, as an INTEGER_CST node, the number of elements for
1717 TYPE (which is an ARRAY_TYPE). This one is a recursive count of all
1718 ARRAY_TYPEs that are clumped together. */
/* Multiply the top-level count by each nested array's count, walking
   inward through TREE_TYPE until a non-array element type is reached;
   the final return of SZ is elided from this view.  */
1721 array_type_nelts_total (type)
1724 tree sz = array_type_nelts_top (type);
1725 type = TREE_TYPE (type);
1726 while (TREE_CODE (type) == ARRAY_TYPE)
1728 tree n = array_type_nelts_top (type);
1729 sz = fold (build (MULT_EXPR, sizetype, sz, n));
1730 type = TREE_TYPE (type);
/* NOTE(review): header elided; this is the mapcar callback used by
   break_out_target_exprs below.  Side-effect-free non-lists pass
   through unchanged (return elided); a TARGET_EXPR is rebuilt as a
   cplus_new with its initializer recursively cleaned.  */
1740 if (TREE_CODE (t) != TREE_LIST && ! TREE_SIDE_EFFECTS (t))
1742 else if (TREE_CODE (t) == TARGET_EXPR)
1743 return build_cplus_new (TREE_TYPE (t),
1744 break_out_target_exprs (TREE_OPERAND (t, 1)));
1748 /* Actually, we'll just clean out the target exprs for the moment. */
/* Walk T with bot_manip (above) so every embedded TARGET_EXPR is
   replaced, returning the cleaned tree.  */
1750 break_out_target_exprs (t)
1753 return mapcar (t, bot_manip);
1756 /* Arrange for an expression to be expanded multiple independent
1757 times. This is useful for cleanup actions, as the backend can
1758 expand them multiple times in different places. */
/* NOTE(review): the function header (unsave_expr) is elided between
   this comment and the body below.  Wraps EXPR in an UNSAVE_EXPR,
   preserving its side-effects flag; idempotent on already-wrapped
   expressions (return elided).  */
1765 /* If this is already protected, no sense in protecting it again. */
1766 if (TREE_CODE (expr) == UNSAVE_EXPR)
1769 t = build1 (UNSAVE_EXPR, TREE_TYPE (expr), expr);
1770 TREE_SIDE_EFFECTS (t) = TREE_SIDE_EFFECTS (expr);
1774 /* Modify a tree in place so that all the evaluate only once things
1775 are cleared out. Return the EXPR given. */
/* Clears cached RTL from SAVE_EXPR, RTL_EXPR and CALL_EXPR nodes and
   recurses into operands so the tree can be expanded again from
   scratch.  The switch on TREE_CODE dispatching the special cases
   below is elided from this view.  */
1777 unsave_expr_now (expr)
1780 enum tree_code code;
1783 if (expr == NULL_TREE)
1786 code = TREE_CODE (expr);
/* SAVE_EXPR: drop the cached RTL so it is recomputed.  */
1790 SAVE_EXPR_RTL (expr) = NULL_RTX;
/* TARGET_EXPR cannot be re-expanded -- report, don't crash.  */
1794 sorry ("TARGET_EXPR reused inside UNSAVE_EXPR");
/* RTL_EXPR: warn and drop the saved insn sequence.  */
1798 warning ("RTL_EXPR reused inside UNSAVE_EXPR");
1799 RTL_EXPR_SEQUENCE (expr) = NULL_RTX;
/* CALL_EXPR: drop cached RTL and unsave each argument on the list.  */
1803 CALL_EXPR_RTL (expr) = NULL_RTX;
1804 if (TREE_OPERAND (expr, 1)
1805 && TREE_CODE (TREE_OPERAND (expr, 1)) == TREE_LIST)
1807 tree exp = TREE_OPERAND (expr, 1);
1810 unsave_expr_now (TREE_VALUE (exp));
1811 exp = TREE_CHAIN (exp);
/* Generic recursion by tree-code class: leaves are left alone,
   expression-like nodes get every operand unsaved.  */
1817 switch (TREE_CODE_CLASS (code))
1819 case 'c': /* a constant */
1820 case 't': /* a type node */
1821 case 'x': /* something random, like an identifier or an ERROR_MARK. */
1822 case 'd': /* A decl node */
1823 case 'b': /* A block node */
1826 case 'e': /* an expression */
1827 case 'r': /* a reference */
1828 case 's': /* an expression with side effects */
1829 case '<': /* a comparison expression */
1830 case '2': /* a binary arithmetic expression */
1831 case '1': /* a unary arithmetic expression */
1832 for (i = tree_code_length[(int) code] - 1; i >= 0; i--)
1833 unsave_expr_now (TREE_OPERAND (expr, i));
/* Unknown class: internal error.  */
1837 my_friendly_abort (999);
1841 /* Since cleanup may have SAVE_EXPRs in it, we protect it with an
1842 UNSAVE_EXPR as the backend cannot yet handle SAVE_EXPRs in cleanups
/* Thin wrapper: register CLEANUP for DECL via the backend, but
   wrapped in unsave_expr so it can be expanded more than once.  */
1845 cp_expand_decl_cleanup (decl, cleanup)
1848 return expand_decl_cleanup (decl, unsave_expr (cleanup));
1851 /* Obstack used for allocating nodes in template function and variable
1854 extern struct obstack *expression_obstack;
1856 /* Similar to `build_nt', except we build
1857 on the permanent_obstack, regardless. */
/* Varargs constructor for an untyped tree node of CODE: each operand
   is pulled from the va_list and copied to the permanent obstack.
   TREE_COMPLEXITY records the current input line for diagnostics.
   NOTE(review): VA_START/va_end and the final return are elided.  */
1860 build_min_nt VPROTO((enum tree_code code, ...))
1863 enum tree_code code;
/* Save and temporarily redirect the expression obstack so the node
   itself is allocated permanently.  */
1865 register struct obstack *ambient_obstack = expression_obstack;
1868 register int length;
1874 code = va_arg (p, enum tree_code);
1877 expression_obstack = &permanent_obstack;
1879 t = make_node (code);
1880 length = tree_code_length[(int) code];
1881 TREE_COMPLEXITY (t) = lineno;
1883 for (i = 0; i < length; i++)
1885 tree x = va_arg (p, tree);
1886 TREE_OPERAND (t, i) = copy_to_permanent (x);
1890 expression_obstack = ambient_obstack;
1894 /* Similar to `build', except we build
1895 on the permanent_obstack, regardless. */
/* Like build_min_nt above, but the node is typed: TT (the second
   vararg) supplies TREE_TYPE (assignment elided in this view).  */
1898 build_min VPROTO((enum tree_code code, tree tt, ...))
1901 enum tree_code code;
1904 register struct obstack *ambient_obstack = expression_obstack;
1907 register int length;
1913 code = va_arg (p, enum tree_code);
1914 tt = va_arg (p, tree);
/* Allocate on the permanent obstack for the node's lifetime.  */
1917 expression_obstack = &permanent_obstack;
1919 t = make_node (code);
1920 length = tree_code_length[(int) code];
1922 TREE_COMPLEXITY (t) = lineno;
1924 for (i = 0; i < length; i++)
1926 tree x = va_arg (p, tree);
1927 TREE_OPERAND (t, i) = copy_to_permanent (x);
1931 expression_obstack = ambient_obstack;
1935 /* Same as `tree_cons' but make a permanent object. */
/* PURPOSE and VALUE are deep-copied to the permanent obstack; CHAIN
   is linked as-is.  The node itself is consed on the permanent
   obstack by temporarily switching current_obstack.  */
1938 min_tree_cons (purpose, value, chain)
1939 tree purpose, value, chain;
1942 register struct obstack *ambient_obstack = current_obstack;
1943 current_obstack = &permanent_obstack;
1945 node = tree_cons (copy_to_permanent (purpose),
1946 copy_to_permanent (value), chain);
1947 current_obstack = ambient_obstack;
/* NOTE(review): header elided; from the body this maps T to its
   TYPE_DECL: the typedecl value for an IDENTIFIER_NODE, T itself for
   a TYPE_DECL (return elided), the stub decl for a bare type node;
   anything else aborts (code 42).  Confirm name against full source.  */
1955 if (TREE_CODE (t) == IDENTIFIER_NODE)
1956 return identifier_typedecl_value (t);
1957 if (TREE_CODE (t) == TYPE_DECL)
1959 if (TREE_CODE_CLASS (TREE_CODE (t)) == 't')
1960 return TYPE_STUB_DECL (t);
1962 my_friendly_abort (42);
/* Return nonzero if node T sits at the very top of OBSTACK, i.e. its
   storage could be freed by rolling the obstack back.  Only TREE_VEC
   size is computed here; other codes abort (code 42).  */
1966 can_free (obstack, t)
1967 struct obstack *obstack;
1972 if (TREE_CODE (t) == TREE_VEC)
1973 size = (TREE_VEC_LENGTH (t)-1) * sizeof (tree) + sizeof (struct tree_vec);
1975 my_friendly_abort (42);
/* Round SIZE up to the obstack's alignment before comparing against
   the obstack's next free address.  */
1977 #define ROUND(x) ((x + obstack_alignment_mask (obstack)) \
1978 & ~ obstack_alignment_mask (obstack))
1979 if ((char *)t + ROUND (size) == obstack_next_free (obstack))
1986 /* Return first vector element whose BINFO_TYPE is ELEM.
1987 Return 0 if ELEM is not in VEC. VEC may be NULL_TREE. */
/* Linear scan of the binfo vector; the null-VEC guard and the final
   return 0 are elided from this view.  */
1990 vec_binfo_member (elem, vec)
1996 for (i = 0; i < TREE_VEC_LENGTH (vec); ++i)
1997 if (elem == BINFO_TYPE (TREE_VEC_ELT (vec, i)))
1998 return TREE_VEC_ELT (vec, i);
2003 /* Kludge around the fact that DECL_CONTEXT for virtual functions returns
2004 the wrong thing for decl_function_context. Hopefully the uses in the
2005 backend won't matter, since we don't need a static chain for local class
/* For a member function, look up the function context of its class's
   main decl instead of the member itself; everything else defers to
   the generic decl_function_context.  */
2009 hack_decl_function_context (decl)
2012 if (TREE_CODE (decl) == FUNCTION_DECL && DECL_FUNCTION_MEMBER_P (decl))
2013 return decl_function_context (TYPE_MAIN_DECL (DECL_CLASS_CONTEXT (decl)));
2014 return decl_function_context (decl);
2017 /* Return truthvalue of whether T1 is the same tree structure as T2.
2018 Return 1 if they are the same.
2019 Return 0 if they are understandably different.
2020 Return -1 if either contains tree structure not understood by
2024 cp_tree_equal (t1, t2)
2027 register enum tree_code code1, code2;
2032 if (t1 == 0 || t2 == 0)
2035 code1 = TREE_CODE (t1);
2036 code2 = TREE_CODE (t2);
2038 if (code1 == NOP_EXPR || code1 == CONVERT_EXPR || code1 == NON_LVALUE_EXPR)
2039 if (code2 == NOP_EXPR || code2 == CONVERT_EXPR || code2 == NON_LVALUE_EXPR)
2040 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2042 return cp_tree_equal (TREE_OPERAND (t1, 0), t2);
2043 else if (code2 == NOP_EXPR || code2 == CONVERT_EXPR
2044 || code2 == NON_LVALUE_EXPR)
2045 return cp_tree_equal (t1, TREE_OPERAND (t2, 0));
2053 return TREE_INT_CST_LOW (t1) == TREE_INT_CST_LOW (t2)
2054 && TREE_INT_CST_HIGH (t1) == TREE_INT_CST_HIGH (t2);
2057 return REAL_VALUES_EQUAL (TREE_REAL_CST (t1), TREE_REAL_CST (t2));
2060 return TREE_STRING_LENGTH (t1) == TREE_STRING_LENGTH (t2)
2061 && !bcmp (TREE_STRING_POINTER (t1), TREE_STRING_POINTER (t2),
2062 TREE_STRING_LENGTH (t1));
2068 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2071 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2074 return simple_cst_list_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2077 /* Special case: if either target is an unallocated VAR_DECL,
2078 it means that it's going to be unified with whatever the
2079 TARGET_EXPR is really supposed to initialize, so treat it
2080 as being equivalent to anything. */
2081 if ((TREE_CODE (TREE_OPERAND (t1, 0)) == VAR_DECL
2082 && DECL_NAME (TREE_OPERAND (t1, 0)) == NULL_TREE
2083 && DECL_RTL (TREE_OPERAND (t1, 0)) == 0)
2084 || (TREE_CODE (TREE_OPERAND (t2, 0)) == VAR_DECL
2085 && DECL_NAME (TREE_OPERAND (t2, 0)) == NULL_TREE
2086 && DECL_RTL (TREE_OPERAND (t2, 0)) == 0))
2089 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2092 return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
2094 case WITH_CLEANUP_EXPR:
2095 cmp = cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2098 return cp_tree_equal (TREE_OPERAND (t1, 2), TREE_OPERAND (t1, 2));
2101 if (TREE_OPERAND (t1, 1) == TREE_OPERAND (t2, 1))
2102 return cp_tree_equal (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0));
2111 case TEMPLATE_CONST_PARM:
2112 return TEMPLATE_CONST_IDX (t1) == TEMPLATE_CONST_IDX (t2);
2115 if (TREE_CODE (TREE_OPERAND (t1, 0)) != TREE_CODE (TREE_OPERAND (t2, 0)))
2117 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t1, 0))) == 't')
2118 return comptypes (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0), 1);
2122 switch (TREE_CODE_CLASS (code1))
2132 for (i=0; i<tree_code_length[(int) code1]; ++i)
2134 cmp = cp_tree_equal (TREE_OPERAND (t1, i), TREE_OPERAND (t2, i));