/* Language-dependent node constructors for parse phase of GNU compiler.
Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
- 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009
+ 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007, 2008, 2009, 2010
Free Software Foundation, Inc.
Hacked by Michael Tiemann (tiemann@cygnus.com)
#include "tree.h"
#include "cp-tree.h"
#include "flags.h"
-#include "real.h"
-#include "rtl.h"
-#include "toplev.h"
-#include "insn-config.h"
-#include "integrate.h"
#include "tree-inline.h"
#include "debug.h"
-#include "target.h"
#include "convert.h"
-#include "tree-flow.h"
+#include "cgraph.h"
+#include "splay-tree.h"
+#include "gimple.h" /* gimple_has_body_p */
static tree bot_manip (tree *, int *, void *);
static tree bot_replace (tree *, int *, void *);
-static tree build_cplus_array_type_1 (tree, tree);
static int list_hash_eq (const void *, const void *);
static hashval_t list_hash_pieces (tree, tree, tree);
static hashval_t list_hash (const void *);
-static cp_lvalue_kind lvalue_p_1 (const_tree);
static tree build_target_expr (tree, tree);
static tree count_trees_r (tree *, int *, void *);
static tree verify_stmt_tree_r (tree *, int *, void *);
/* If REF is an lvalue, returns the kind of lvalue that REF is.
Otherwise, returns clk_none. */
-static cp_lvalue_kind
-lvalue_p_1 (const_tree ref)
+cp_lvalue_kind
+lvalue_kind (const_tree ref)
{
cp_lvalue_kind op1_lvalue_kind = clk_none;
cp_lvalue_kind op2_lvalue_kind = clk_none;
if (TREE_CODE (ref) == INDIRECT_REF
&& TREE_CODE (TREE_TYPE (TREE_OPERAND (ref, 0)))
== REFERENCE_TYPE)
- return lvalue_p_1 (TREE_OPERAND (ref, 0));
+ return lvalue_kind (TREE_OPERAND (ref, 0));
- if (TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
+ if (TREE_TYPE (ref)
+ && TREE_CODE (TREE_TYPE (ref)) == REFERENCE_TYPE)
{
/* unnamed rvalue references are rvalues */
if (TYPE_REF_IS_RVALUE (TREE_TYPE (ref))
case WITH_CLEANUP_EXPR:
case REALPART_EXPR:
case IMAGPART_EXPR:
- return lvalue_p_1 (TREE_OPERAND (ref, 0));
+ return lvalue_kind (TREE_OPERAND (ref, 0));
case COMPONENT_REF:
- op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0));
+ op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0));
/* Look at the member designator. */
if (!op1_lvalue_kind)
;
return clk_ordinary;
break;
- /* A currently unresolved scope ref. */
+ /* A scope ref in a template, left as SCOPE_REF to support later
+ access checking. */
case SCOPE_REF:
- gcc_unreachable ();
+ gcc_assert (!type_dependent_expression_p (CONST_CAST_TREE(ref)));
+ return lvalue_kind (TREE_OPERAND (ref, 1));
+
case MAX_EXPR:
case MIN_EXPR:
/* Disallow <? and >? as lvalues if either argument side-effects. */
if (TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 0))
|| TREE_SIDE_EFFECTS (TREE_OPERAND (ref, 1)))
return clk_none;
- op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 0));
- op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1));
+ op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 0));
+ op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1));
break;
case COND_EXPR:
- op1_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 1)
+ op1_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 1)
? TREE_OPERAND (ref, 1)
: TREE_OPERAND (ref, 0));
- op2_lvalue_kind = lvalue_p_1 (TREE_OPERAND (ref, 2));
+ op2_lvalue_kind = lvalue_kind (TREE_OPERAND (ref, 2));
break;
case MODIFY_EXPR:
return clk_ordinary;
case COMPOUND_EXPR:
- return lvalue_p_1 (TREE_OPERAND (ref, 1));
+ return lvalue_kind (TREE_OPERAND (ref, 1));
case TARGET_EXPR:
return clk_class;
with a BASELINK. */
/* This CONST_CAST is okay because BASELINK_FUNCTIONS returns
its argument unmodified and we assign it to a const_tree. */
- return lvalue_p_1 (BASELINK_FUNCTIONS (CONST_CAST_TREE (ref)));
+ return lvalue_kind (BASELINK_FUNCTIONS (CONST_CAST_TREE (ref)));
case NON_DEPENDENT_EXPR:
/* We must consider NON_DEPENDENT_EXPRs to be lvalues so that
computes the C++ definition of lvalue. */
cp_lvalue_kind
-real_lvalue_p (tree ref)
+real_lvalue_p (const_tree ref)
{
- cp_lvalue_kind kind = lvalue_p_1 (ref);
+ cp_lvalue_kind kind = lvalue_kind (ref);
if (kind & (clk_rvalueref|clk_class))
return clk_none;
else
/* Returns true iff lvalue_kind classifies REF as something other than
   clk_none (i.e. REF has some lvalue-ish classification).  */

bool
lvalue_p (const_tree ref)
{
  return lvalue_kind (ref) != clk_none;
}
/* This differs from real_lvalue_p in that rvalues formed by dereferencing
bool
lvalue_or_rvalue_with_address_p (const_tree ref)
{
- cp_lvalue_kind kind = lvalue_p_1 (ref);
+ cp_lvalue_kind kind = lvalue_kind (ref);
if (kind & clk_class)
return false;
else
AGGR_INIT_EXPR_ARGP (init));
TREE_SIDE_EFFECTS (rval) = 1;
AGGR_INIT_VIA_CTOR_P (rval) = is_ctor;
+ TREE_NOTHROW (rval) = TREE_NOTHROW (init);
}
else
rval = init;
if (TREE_CODE (rval) == AGGR_INIT_EXPR)
slot = AGGR_INIT_EXPR_SLOT (rval);
- else if (TREE_CODE (rval) == CALL_EXPR)
+ else if (TREE_CODE (rval) == CALL_EXPR
+ || TREE_CODE (rval) == CONSTRUCTOR)
slot = build_local_temp (type);
else
return rval;
return rval;
}
-/* Return a TARGET_EXPR which expresses the direct-initialization of one
- array from another. */
+/* Return a TARGET_EXPR which expresses the initialization of an array to
+ be named later, either default-initialization or copy-initialization
+ from another array of the same type. */
tree
-build_array_copy (tree init)
+build_vec_init_expr (tree type, tree init)
{
- tree type = TREE_TYPE (init);
- tree slot = build_local_temp (type);
+ tree slot;
+ tree inner_type = strip_array_types (type);
+ tree elt_init = integer_zero_node;
+ bool value_init = false;
+
+ /* Since we're deferring building the actual constructor calls until
+ gimplification time, we need to build one now and throw it away so
+ that the relevant constructor gets mark_used before cgraph decides
+ what functions are needed. Here we assume that init is either
+ NULL_TREE, void_type_node (indicating value-initialization), or
+ another array to copy. */
+ if (integer_zerop (array_type_nelts_total (type)))
+ {
+ /* No actual initialization to do. */;
+ init = NULL_TREE;
+ }
+ else if (init == void_type_node)
+ {
+ elt_init = build_value_init (inner_type, tf_warning_or_error);
+ value_init = true;
+ init = NULL_TREE;
+ }
+ else
+ {
+ gcc_assert (init == NULL_TREE
+ || (same_type_ignoring_top_level_qualifiers_p
+ (type, TREE_TYPE (init))));
+
+ if (CLASS_TYPE_P (inner_type))
+ {
+ VEC(tree,gc) *argvec = make_tree_vector ();
+ if (init)
+ {
+ tree dummy = build_dummy_object (inner_type);
+ if (!real_lvalue_p (init))
+ dummy = move (dummy);
+ VEC_quick_push (tree, argvec, dummy);
+ }
+ elt_init
+ = build_special_member_call (NULL_TREE, complete_ctor_identifier,
+ &argvec, inner_type, LOOKUP_NORMAL,
+ tf_warning_or_error);
+ }
+ }
+
+ slot = build_local_temp (type);
init = build2 (VEC_INIT_EXPR, type, slot, init);
SET_EXPR_LOCATION (init, input_location);
+
+ if (current_function_decl
+ && DECL_DECLARED_CONSTEXPR_P (current_function_decl))
+ {
+ if (potential_constant_expression (elt_init))
+ VEC_INIT_EXPR_IS_CONSTEXPR (init) = true;
+ else if (!processing_template_decl)
+ require_potential_constant_expression (elt_init);
+ }
+ VEC_INIT_EXPR_VALUE_INIT (init) = value_init;
+
init = build_target_expr (slot, init);
TARGET_EXPR_IMPLICIT_P (init) = 1;
return init;
}
+tree
+build_array_copy (tree init)
+{
+ return build_vec_init_expr (TREE_TYPE (init), init);
+}
+
/* Build a TARGET_EXPR using INIT to initialize a new temporary of the
indicated TYPE. */
{
gcc_assert (!VOID_TYPE_P (type));
- if (TREE_CODE (init) == TARGET_EXPR)
+ if (TREE_CODE (init) == TARGET_EXPR
+ || init == error_mark_node)
return init;
- else if (CLASS_TYPE_P (type) && !TYPE_HAS_TRIVIAL_INIT_REF (type)
+ else if (CLASS_TYPE_P (type) && type_has_nontrivial_copy_init (type)
&& !VOID_TYPE_P (TREE_TYPE (init))
&& TREE_CODE (init) != COND_EXPR
&& TREE_CODE (init) != CONSTRUCTOR
/* Like the above function, but without the checking. This function should
only be used by code which is deliberately trying to subvert the type
- system, such as call_builtin_trap. */
+ system, such as call_builtin_trap. Or build_over_call, to avoid
+ infinite recursion. */
tree
force_target_expr (tree type, tree init)
if (error_operand_p (expr))
return expr;
+ expr = mark_rvalue_use (expr);
+
/* [basic.lval]
Non-class rvalues always have cv-unqualified types. */
return (TREE_TYPE (t1) == t2->type && TYPE_DOMAIN (t1) == t2->domain);
}
-/* Hash table containing all of the C++ array types, including
- dependent array types and array types whose element type is
- cv-qualified. */
+/* Hash table containing dependent array types, which are unsuitable for
+ the language-independent type hash table. */
static GTY ((param_is (union tree_node))) htab_t cplus_array_htab;
+/* Like build_array_type, but handle special C++ semantics. */
-static tree
-build_cplus_array_type_1 (tree elt_type, tree index_type)
+tree
+build_cplus_array_type (tree elt_type, tree index_type)
{
tree t;
else
t = build_array_type (elt_type, index_type);
+ /* We want TYPE_MAIN_VARIANT of an array to strip cv-quals from the
+ element type as well, so fix it up if needed. */
+ if (elt_type != TYPE_MAIN_VARIANT (elt_type))
+ {
+ tree m = build_cplus_array_type (TYPE_MAIN_VARIANT (elt_type),
+ index_type);
+ if (TYPE_MAIN_VARIANT (t) != m)
+ {
+ TYPE_MAIN_VARIANT (t) = m;
+ TYPE_NEXT_VARIANT (t) = TYPE_NEXT_VARIANT (m);
+ TYPE_NEXT_VARIANT (m) = t;
+ }
+ }
+
/* Push these needs up so that initialization takes place
more easily. */
TYPE_NEEDS_CONSTRUCTING (t)
return t;
}
-tree
-build_cplus_array_type (tree elt_type, tree index_type)
-{
- tree t;
- int type_quals = cp_type_quals (elt_type);
-
- if (type_quals != TYPE_UNQUALIFIED)
- elt_type = cp_build_qualified_type (elt_type, TYPE_UNQUALIFIED);
-
- t = build_cplus_array_type_1 (elt_type, index_type);
-
- if (type_quals != TYPE_UNQUALIFIED)
- t = cp_build_qualified_type (t, type_quals);
-
- return t;
-}
-
/* Return an ARRAY_TYPE with element type ELT and length N. */
tree
if (element_type == error_mark_node)
return error_mark_node;
- /* See if we already have an identically qualified type. */
+ /* See if we already have an identically qualified type. Tests
+ should be equivalent to those in check_qualified_type. */
for (t = TYPE_MAIN_VARIANT (type); t; t = TYPE_NEXT_VARIANT (t))
- if (cp_type_quals (t) == type_quals
+ if (TREE_TYPE (t) == element_type
&& TYPE_NAME (t) == TYPE_NAME (type)
- && TYPE_CONTEXT (t) == TYPE_CONTEXT (type))
+ && TYPE_CONTEXT (t) == TYPE_CONTEXT (type)
+ && attribute_list_equal (TYPE_ATTRIBUTES (t),
+ TYPE_ATTRIBUTES (type)))
break;
if (!t)
- {
- t = build_cplus_array_type_1 (element_type, TYPE_DOMAIN (type));
+ {
+ t = build_cplus_array_type (element_type, TYPE_DOMAIN (type));
- if (TYPE_MAIN_VARIANT (t) != TYPE_MAIN_VARIANT (type))
- {
- /* Set the main variant of the newly-created ARRAY_TYPE
- (with cv-qualified element type) to the main variant of
- the unqualified ARRAY_TYPE we started with. */
- tree last_variant = t;
- tree m = TYPE_MAIN_VARIANT (type);
-
- /* Find the last variant on the new ARRAY_TYPEs list of
- variants, setting the main variant of each of the other
- types to the main variant of our unqualified
- ARRAY_TYPE. */
- while (TYPE_NEXT_VARIANT (last_variant))
- {
- TYPE_MAIN_VARIANT (last_variant) = m;
- last_variant = TYPE_NEXT_VARIANT (last_variant);
- }
-
- /* Splice in the newly-created variants. */
- TYPE_NEXT_VARIANT (last_variant) = TYPE_NEXT_VARIANT (m);
- TYPE_NEXT_VARIANT (m) = t;
- TYPE_MAIN_VARIANT (last_variant) = m;
- }
- }
+ /* Keep the typedef name. */
+ if (TYPE_NAME (t) != TYPE_NAME (type))
+ {
+ t = build_variant_type_copy (t);
+ TYPE_NAME (t) = TYPE_NAME (type);
+ }
+ }
/* Even if we already had this variant, we update
TYPE_NEEDS_CONSTRUCTING and TYPE_HAS_NONTRIVIAL_DESTRUCTOR in case
}
/* A reference or method type shall not be cv-qualified.
- [dcl.ref], [dcl.fct] */
+ [dcl.ref], [dcl.fct]. This used to be an error, but as of DR 295
+ (in CD1) we always ignore extra cv-quals on functions. */
if (type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE)
&& (TREE_CODE (type) == REFERENCE_TYPE
+ || TREE_CODE (type) == FUNCTION_TYPE
|| TREE_CODE (type) == METHOD_TYPE))
{
- bad_quals |= type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
+ if (TREE_CODE (type) == REFERENCE_TYPE)
+ bad_quals |= type_quals & (TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
type_quals &= ~(TYPE_QUAL_CONST | TYPE_QUAL_VOLATILE);
}
+ /* But preserve any function-cv-quals on a FUNCTION_TYPE. */
+ if (TREE_CODE (type) == FUNCTION_TYPE)
+ type_quals |= type_memfn_quals (type);
+
/* A restrict-qualified type must be a pointer (or reference)
to object or incomplete type. */
if ((type_quals & TYPE_QUAL_RESTRICT)
type_quals &= ~TYPE_QUAL_RESTRICT;
}
- if (bad_quals == TYPE_UNQUALIFIED)
+ if (bad_quals == TYPE_UNQUALIFIED
+ || (complain & tf_ignore_bad_quals))
/*OK*/;
- else if (!(complain & (tf_error | tf_ignore_bad_quals)))
+ else if (!(complain & tf_error))
return error_mark_node;
else
{
- if (complain & tf_ignore_bad_quals)
- /* We're not going to warn about constifying things that can't
- be constified. */
- bad_quals &= ~TYPE_QUAL_CONST;
- if (bad_quals)
- {
- tree bad_type = build_qualified_type (ptr_type_node, bad_quals);
-
- if (!(complain & tf_ignore_bad_quals))
- error ("%qV qualifiers cannot be applied to %qT",
- bad_type, type);
- }
+ tree bad_type = build_qualified_type (ptr_type_node, bad_quals);
+ error ("%qV qualifiers cannot be applied to %qT",
+ bad_type, type);
}
/* Retrieve (or create) the appropriately qualified variant. */
tree
cv_unqualified (tree type)
{
- int quals = TYPE_QUALS (type);
+ int quals;
+
+ if (type == error_mark_node)
+ return type;
+
+ quals = cp_type_quals (type);
quals &= ~(TYPE_QUAL_CONST|TYPE_QUAL_VOLATILE);
return cp_build_qualified_type (type, quals);
}
TREE_CHAIN (arg_types));
}
else
+ {
result = build_function_type (type,
arg_types);
+ result = apply_memfn_quals (result, type_memfn_quals (t));
+ }
if (TYPE_RAISES_EXCEPTIONS (t))
result = build_exception_variant (result,
TYPE_RAISES_EXCEPTIONS (t));
}
break;
+ case TYPENAME_TYPE:
+ result = make_typename_type (strip_typedefs (TYPE_CONTEXT (t)),
+ TYPENAME_TYPE_FULLNAME (t),
+ typename_type, tf_none);
+ break;
default:
break;
}
return cp_build_qualified_type (result, cp_type_quals (t));
}
-/* Returns true iff TYPE is a type variant created for a typedef. */
-
-bool
-typedef_variant_p (tree type)
-{
- return is_typedef_decl (TYPE_NAME (type));
-}
-
-\f
/* Makes a copy of BINFO and TYPE, which is to be inherited into a
graph dominated by T. If BINFO is NULL, TYPE is a dependent base,
and we do a shallow copy. If BINFO is non-NULL, we do a deep copy.
}
tree
-get_first_fn (tree from)
+get_fns (tree from)
{
gcc_assert (is_overloaded_fn (from));
/* A baselink is also considered an overloaded function. */
from = BASELINK_FUNCTIONS (from);
if (TREE_CODE (from) == TEMPLATE_ID_EXPR)
from = TREE_OPERAND (from, 0);
- return OVL_CURRENT (from);
+ return from;
+}
+
+tree
+get_first_fn (tree from)
+{
+ return OVL_CURRENT (get_fns (from));
}
/* Return a new OVL node, concatenating it with the old one. */
tree
build_exception_variant (tree type, tree raises)
{
- tree v = TYPE_MAIN_VARIANT (type);
- int type_quals = TYPE_QUALS (type);
+ tree v;
+ int type_quals;
+
+ if (comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (type), ce_exact))
+ return type;
- for (; v; v = TYPE_NEXT_VARIANT (v))
+ type_quals = TYPE_QUALS (type);
+ for (v = TYPE_MAIN_VARIANT (type); v; v = TYPE_NEXT_VARIANT (v))
if (check_qualified_type (v, type, type_quals)
- && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (v), 1))
+ && comp_except_specs (raises, TYPE_RAISES_EXCEPTIONS (v), ce_exact))
return v;
/* Need to build a new variant. */
/* Fall through. */
case ENUMERAL_TYPE:
/* Only treat anonymous types as having no linkage if they're at
- namespace scope. This doesn't have a core issue number yet. */
+ namespace scope. This is core issue 966. */
if (TYPE_ANONYMOUS_P (t) && TYPE_NAMESPACE_SCOPE_P (t))
return t;
return no_linkage_check (TYPE_CONTEXT (t), relaxed_p);
else if (TREE_CODE (r) == FUNCTION_DECL)
{
- if (!relaxed_p || !vague_linkage_fn_p (r))
+ if (!relaxed_p || !vague_linkage_p (r))
return t;
else
r = CP_DECL_CONTEXT (r);
{
print_search_statistics ();
print_class_statistics ();
+ print_template_statistics ();
#ifdef GATHER_STATISTICS
fprintf (stderr, "maximum template instantiation depth reached: %d\n",
depth_reached);
splay_tree target_remap = ((splay_tree) data);
tree t = *tp;
- if (!TYPE_P (t) && TREE_CONSTANT (t))
+ if (!TYPE_P (t) && TREE_CONSTANT (t) && !TREE_SIDE_EFFECTS (t))
{
/* There can't be any TARGET_EXPRs or their slot variables below
- this point. We used to check !TREE_SIDE_EFFECTS, but then we
- failed to copy an ADDR_EXPR of the slot VAR_DECL. */
+ this point. */
*walk_subtrees = 0;
return NULL_TREE;
}
return t;
}
-/* Similar to `build_call_list', but for template definitions of non-dependent
- expressions. NON_DEP is the non-dependent expression that has been
- built. */
+/* Similar to `build_nt_call_vec', but for template definitions of
+ non-dependent expressions. NON_DEP is the non-dependent expression
+ that has been built. */
tree
build_min_non_dep_call_vec (tree non_dep, tree fn, VEC(tree,gc) *argvec)
/* We need to do this when determining whether or not two
non-type pointer to member function template arguments
are the same. */
- if (!(same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
- /* The first operand is RTL. */
- && TREE_OPERAND (t1, 0) == TREE_OPERAND (t2, 0)))
+ if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2))
+ || CONSTRUCTOR_NELTS (t1) != CONSTRUCTOR_NELTS (t2))
return false;
- return cp_tree_equal (TREE_OPERAND (t1, 1), TREE_OPERAND (t2, 1));
+ {
+ tree field, value;
+ unsigned int i;
+ FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (t1), i, field, value)
+ {
+ constructor_elt *elt2 = CONSTRUCTOR_ELT (t2, i);
+ if (!cp_tree_equal (field, elt2->index)
+ || !cp_tree_equal (value, elt2->value))
+ return false;
+ }
+ }
+ return true;
case TREE_LIST:
if (!cp_tree_equal (TREE_PURPOSE (t1), TREE_PURPOSE (t2)))
arg2 = next_call_expr_arg (&iter2))
if (!cp_tree_equal (arg1, arg2))
return false;
- return (arg1 || arg2);
+ if (arg1 || arg2)
+ return false;
+ return true;
}
case TARGET_EXPR:
BASELINK_FUNCTIONS (t2)));
case TEMPLATE_PARM_INDEX:
+ if (TEMPLATE_PARM_NUM_SIBLINGS (t1)
+ != TEMPLATE_PARM_NUM_SIBLINGS (t2))
+ return false;
return (TEMPLATE_PARM_IDX (t1) == TEMPLATE_PARM_IDX (t2)
&& TEMPLATE_PARM_LEVEL (t1) == TEMPLATE_PARM_LEVEL (t2)
&& (TEMPLATE_PARM_PARAMETER_PACK (t1)
return same_type_p (TRAIT_EXPR_TYPE1 (t1), TRAIT_EXPR_TYPE1 (t2))
&& same_type_p (TRAIT_EXPR_TYPE2 (t1), TRAIT_EXPR_TYPE2 (t2));
+ case CAST_EXPR:
+ case STATIC_CAST_EXPR:
+ case REINTERPRET_CAST_EXPR:
+ case CONST_CAST_EXPR:
+ case DYNAMIC_CAST_EXPR:
+ case NEW_EXPR:
+ if (!same_type_p (TREE_TYPE (t1), TREE_TYPE (t2)))
+ return false;
+ /* Now compare operands as usual. */
+ break;
+
default:
break;
}
/* Returns nonzero iff FUNCTION's type is a varargs (stdarg) function
   type.  */

int
varargs_function_p (const_tree function)
{
  return stdarg_p (TREE_TYPE (function)) ? 1 : 0;
}
/* Returns 1 if decl is a member of a class. */
build_dummy_object (tree type)
{
tree decl = build1 (NOP_EXPR, build_pointer_type (type), void_zero_node);
- return cp_build_indirect_ref (decl, NULL, tf_warning_or_error);
+ return cp_build_indirect_ref (decl, RO_NULL, tf_warning_or_error);
}
/* We've gotten a reference to a member of TYPE. Return *this if appropriate,
{
tree decl, context;
tree binfo;
+ tree current = current_nonlambda_class_type ();
- if (current_class_type
- && (binfo = lookup_base (current_class_type, type,
- ba_unique | ba_quiet, NULL)))
- context = current_class_type;
+ if (current
+ && (binfo = lookup_base (current, type, ba_any, NULL)))
+ context = current;
else
{
/* Reference from a nested class member function. */
if (binfop)
*binfop = binfo;
- if (current_class_ref && context == current_class_type
- /* Kludge: Make sure that current_class_type is actually
- correct. It might not be if we're in the middle of
- tsubst_default_argument. */
- && same_type_p (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref)),
- current_class_type))
+ if (current_class_ref
+ /* current_class_ref might not correspond to current_class_type if
+ we're in tsubst_default_argument or a lambda-declarator; in either
+ case, we want to use current_class_ref if it matches CONTEXT. */
+ && (same_type_ignoring_top_level_qualifiers_p
+ (TREE_TYPE (current_class_ref), context)))
decl = current_class_ref;
+ else if (current != current_class_type
+ && context == nonlambda_method_basetype ())
+ /* In a lambda, need to go through 'this' capture. */
+ decl = (cp_build_indirect_ref
+ ((lambda_expr_this_capture
+ (CLASSTYPE_LAMBDA_EXPR (current_class_type))),
+ RO_NULL, tf_warning_or_error));
else
decl = build_dummy_object (context);
return 0;
}
-/* Returns true iff copying an object of type T is non-trivial. */
+/* Returns true iff copying an object of type T (including via move
+ constructor) is non-trivial. That is, T has no non-trivial copy
+ constructors and no non-trivial move constructors. */
bool
type_has_nontrivial_copy_init (const_tree t)
t = strip_array_types (CONST_CAST_TREE (t));
if (CLASS_TYPE_P (t))
- return TYPE_HAS_COMPLEX_INIT_REF (t);
+ {
+ gcc_assert (COMPLETE_TYPE_P (t));
+ return ((TYPE_HAS_COPY_CTOR (t)
+ && TYPE_HAS_COMPLEX_COPY_CTOR (t))
+ || TYPE_HAS_COMPLEX_MOVE_CTOR (t));
+ }
else
return 0;
}
-/* Returns 1 iff type T is a trivial type, as defined in [basic.types]. */
+/* Returns 1 iff type T is a trivially copyable type, as defined in
+ [basic.types] and [class]. */
+
+bool
+trivially_copyable_p (const_tree t)
+{
+ t = strip_array_types (CONST_CAST_TREE (t));
+
+ if (CLASS_TYPE_P (t))
+ return ((!TYPE_HAS_COPY_CTOR (t)
+ || !TYPE_HAS_COMPLEX_COPY_CTOR (t))
+ && !TYPE_HAS_COMPLEX_MOVE_CTOR (t)
+ && (!TYPE_HAS_COPY_ASSIGN (t)
+ || !TYPE_HAS_COMPLEX_COPY_ASSIGN (t))
+ && !TYPE_HAS_COMPLEX_MOVE_ASSIGN (t)
+ && TYPE_HAS_TRIVIAL_DESTRUCTOR (t));
+ else
+ return scalarish_type_p (t);
+}
+
+/* Returns 1 iff type T is a trivial type, as defined in [basic.types] and
+ [class]. */
bool
trivial_type_p (const_tree t)
if (CLASS_TYPE_P (t))
return (TYPE_HAS_TRIVIAL_DFLT (t)
- && TYPE_HAS_TRIVIAL_INIT_REF (t)
- && TYPE_HAS_TRIVIAL_ASSIGN_REF (t)
- && TYPE_HAS_TRIVIAL_DESTRUCTOR (t));
+ && trivially_copyable_p (t));
else
return scalarish_type_p (t);
}
argument unmodified and we assign it to a const_tree. */
t = strip_array_types (CONST_CAST_TREE(t));
- if (CLASS_TYPE_P (t))
+ if (!CLASS_TYPE_P (t))
+ return scalarish_type_p (t);
+ else if (cxx_dialect > cxx98)
/* [class]/10: A POD struct is a class that is both a trivial class and a
standard-layout class, and has no non-static data members of type
non-POD struct, non-POD union (or array of such types).
non-std-layout or non-trivial, the class will be too. */
return (std_layout_type_p (t) && trivial_type_p (t));
else
- return scalarish_type_p (t);
+ /* The C++98 definition of POD is different. */
+ return !CLASSTYPE_NON_LAYOUT_POD_P (t);
}
/* Returns true iff T is POD for the purpose of layout, as defined in the
tree new_type;
new_type = build_type_attribute_variant (type, attributes);
- if ((TREE_CODE (new_type) == FUNCTION_TYPE
- || TREE_CODE (new_type) == METHOD_TYPE)
- && (TYPE_RAISES_EXCEPTIONS (new_type)
- != TYPE_RAISES_EXCEPTIONS (type)))
+ if (TREE_CODE (new_type) == FUNCTION_TYPE
+ || TREE_CODE (new_type) == METHOD_TYPE)
new_type = build_exception_variant (new_type,
TYPE_RAISES_EXCEPTIONS (type));
bool
cxx_type_hash_eq (const_tree typea, const_tree typeb)
{
- gcc_assert (TREE_CODE (typea) == FUNCTION_TYPE);
+ gcc_assert (TREE_CODE (typea) == FUNCTION_TYPE
+ || TREE_CODE (typea) == METHOD_TYPE);
return comp_except_specs (TYPE_RAISES_EXCEPTIONS (typea),
- TYPE_RAISES_EXCEPTIONS (typeb), 1);
+ TYPE_RAISES_EXCEPTIONS (typeb), ce_exact);
}
/* Apply FUNC to all language-specific sub-trees of TP in a pre-order
if (DECL_CONSTRUCTOR_P (decl))
return sfk_constructor;
if (DECL_OVERLOADED_OPERATOR_P (decl) == NOP_EXPR)
- return sfk_assignment_operator;
+ {
+ if (copy_fn_p (decl))
+ return sfk_copy_assignment;
+ if (move_fn_p (decl))
+ return sfk_move_assignment;
+ }
if (DECL_MAYBE_IN_CHARGE_DESTRUCTOR_P (decl))
return sfk_destructor;
if (DECL_COMPLETE_DESTRUCTOR_P (decl))
/* Everything else has internal linkage. */
return lk_internal;
}
+
+/* Returns the storage duration of the object or reference associated with
+ the indicated DECL, which should be a VAR_DECL or PARM_DECL. */
+
+duration_kind
+decl_storage_duration (tree decl)
+{
+ if (TREE_CODE (decl) == PARM_DECL)
+ return dk_auto;
+ if (TREE_CODE (decl) == FUNCTION_DECL)
+ return dk_static;
+ gcc_assert (TREE_CODE (decl) == VAR_DECL);
+ if (!TREE_STATIC (decl)
+ && !DECL_EXTERNAL (decl))
+ return dk_auto;
+ if (DECL_THREAD_LOCAL_P (decl))
+ return dk_thread;
+ return dk_static;
+}
\f
/* EXP is an expression that we want to pre-evaluate. Returns (in
*INITP) an expression that will perform the pre-evaluation. The
if (!TREE_SIDE_EFFECTS (exp))
init_expr = NULL_TREE;
- else if (!real_lvalue_p (exp)
- || !TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (exp)))
+ else if (!TYPE_NEEDS_CONSTRUCTING (TREE_TYPE (exp))
+ || !lvalue_or_rvalue_with_address_p (exp))
{
init_expr = get_target_expr (exp);
exp = TARGET_EXPR_SLOT (init_expr);
}
else
{
- exp = cp_build_unary_op (ADDR_EXPR, exp, 1, tf_warning_or_error);
+ bool xval = !real_lvalue_p (exp);
+ exp = cp_build_addr_expr (exp, tf_warning_or_error);
init_expr = get_target_expr (exp);
exp = TARGET_EXPR_SLOT (init_expr);
- exp = cp_build_indirect_ref (exp, 0, tf_warning_or_error);
+ exp = cp_build_indirect_ref (exp, RO_NULL, tf_warning_or_error);
+ if (xval)
+ exp = move (exp);
}
*initp = init_expr;
cast_valid_in_integral_constant_expression_p (tree type)
{
return (INTEGRAL_OR_ENUMERATION_TYPE_P (type)
+ || cxx_dialect >= cxx0x
|| dependent_type_p (type)
|| type == error_mark_node);
}
if (!gimple_has_body_p (decl)
&& !DECL_THUNK_P (decl)
&& !DECL_EXTERNAL (decl))
- return true;
+ {
+ struct cgraph_node *node = cgraph_get_node (decl);
+
+ /* Don't fix same_body aliases. Although they don't have their own
+ CFG, they share it with what they alias to. */
+ if (!node
+ || node->decl == decl
+ || !node->same_body)
+ return true;
+ }
return false;
}
&& ANON_AGGRNAME_P (name))
TYPE_NAME (t) = NULL_TREE;
}
+ if (TREE_CODE (t) == NAMESPACE_DECL)
+ {
+ /* The list of users of a namespace isn't useful for the middle-end
+ or debug generators. */
+ DECL_NAMESPACE_USERS (t) = NULL_TREE;
+ /* Neither do we need the leftover chaining of namespaces
+ from the binding level. */
+ DECL_CHAIN (t) = NULL_TREE;
+ }
+}
+
+/* Stub for c-common. Please keep in sync with c-decl.c.
+ FIXME: If address space support is target specific, then this
+ should be a C target hook. But currently this is not possible,
+ because this function is called via REGISTER_TARGET_PRAGMAS. */
+void
+c_register_addr_space (const char *word ATTRIBUTE_UNUSED,
+ addr_space_t as ATTRIBUTE_UNUSED)
+{
}
\f