static tree maybe_convert_cond (tree);
static tree finalize_nrv_r (tree *, int *, void *);
+static tree capture_decltype (tree);
+static tree thisify_lambda_field (tree);
/* Deferred Access Checking Overview
TREE_USED (destination) = 1;
else
{
- /* The DESTINATION is being used as an rvalue. */
if (!processing_template_decl)
{
- destination = decay_conversion (destination);
destination = cp_convert (ptr_type_node, destination);
if (error_operand_p (destination))
return NULL_TREE;
}
/* Finish an asm-statement, whose components are a STRING, some
- OUTPUT_OPERANDS, some INPUT_OPERANDS, and some CLOBBERS. Also note
- whether the asm-statement should be considered volatile. */
+ OUTPUT_OPERANDS, some INPUT_OPERANDS, some CLOBBERS and some
+ LABELS. Also note whether the asm-statement should be
+ considered volatile. */
tree
finish_asm_stmt (int volatile_p, tree string, tree output_operands,
- tree input_operands, tree clobbers)
+ tree input_operands, tree clobbers, tree labels)
{
tree r;
tree t;
oconstraints = (const char **) alloca (noutputs * sizeof (char *));
string = resolve_asm_operand_names (string, output_operands,
- input_operands);
+ input_operands, labels);
for (i = 0, t = output_operands; t; t = TREE_CHAIN (t), ++i)
{
effectively const. */
|| (CLASS_TYPE_P (TREE_TYPE (operand))
&& C_TYPE_FIELDS_READONLY (TREE_TYPE (operand)))))
- readonly_error (operand, "assignment (via 'asm' output)");
+ readonly_error (operand, REK_ASSIGNMENT_ASM);
constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t)));
oconstraints[i] = constraint;
r = build_stmt (input_location, ASM_EXPR, string,
output_operands, input_operands,
- clobbers);
+ clobbers, labels);
ASM_VOLATILE_P (r) = volatile_p || noutputs == 0;
r = maybe_cleanup_point_expr_void (r);
return add_stmt (r);
{
gcc_assert (TREE_CODE (decl) == FIELD_DECL);
- if (!object && skip_evaluation)
+ if (!object && cp_unevaluated_operand != 0)
{
/* DR 613: Can use non-static data members without an associated
object in sizeof/decltype/alignof. */
return error_mark_node;
}
+
+ /* If decl is a non-capture field and object has a lambda type,
+ then we have a reference to a member of 'this' from a
+ lambda inside a non-static member function, and we must get to decl
+ through the 'this' capture. If decl is not a member of that object,
+ either, then its access will still fail later. */
+ if (LAMBDA_TYPE_P (TREE_TYPE (object))
+ && !LAMBDA_TYPE_P (DECL_CONTEXT (decl)))
+ object = cp_build_indirect_ref (lambda_expr_this_capture
+ (CLASSTYPE_LAMBDA_EXPR
+ (TREE_TYPE (object))),
+ RO_NULL,
+ /*complain=*/tf_warning_or_error);
+
if (current_class_ptr)
TREE_USED (current_class_ptr) = 1;
if (processing_template_decl && !qualifying_scope)
return build_min (COMPONENT_REF, type, object, decl, NULL_TREE);
}
+ /* If PROCESSING_TEMPLATE_DECL is nonzero here, then
+ QUALIFYING_SCOPE is also non-null. Wrap this in a SCOPE_REF
+ for now. */
+ else if (processing_template_decl)
+ return build_qualified_name (TREE_TYPE (decl),
+ qualifying_scope,
+ DECL_NAME (decl),
+ /*template_p=*/false);
else
{
tree access_type = TREE_TYPE (object);
}
}
- /* If PROCESSING_TEMPLATE_DECL is nonzero here, then
- QUALIFYING_SCOPE is also non-null. Wrap this in a SCOPE_REF
- for now. */
- if (processing_template_decl)
- return build_qualified_name (TREE_TYPE (decl),
- qualifying_scope,
- DECL_NAME (decl),
- /*template_p=*/false);
-
perform_or_defer_access_check (TYPE_BINFO (access_type), decl,
decl);
}
}
+/* If we are currently parsing a template and we encountered a typedef
+   TYPEDEF_DECL that is being accessed through CONTEXT, this function
+   adds the typedef to a list tied to the current template.
+   At template instantiation time, that list is walked and access checks
+   performed for each typedef.
+   LOCATION is the location of the usage point of TYPEDEF_DECL.  */
+
+void
+add_typedef_to_current_template_for_access_check (tree typedef_decl,
+                                                  tree context,
+                                                  location_t location)
+{
+  tree template_info = NULL;
+  tree cs = current_scope ();
+
+  /* Nothing to record unless we really have a typedef accessed through
+     a class type from inside some enclosing scope.  */
+  if (!is_typedef_decl (typedef_decl)
+      || !context
+      || !CLASS_TYPE_P (context)
+      || !cs)
+    return;
+
+  /* Only class and function scopes can carry template information.  */
+  if (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL)
+    template_info = get_template_info (cs);
+
+  /* Defer the check only when we are inside a template and CONTEXT is
+     not a class currently being defined (in the latter case access can
+     be checked immediately).  */
+  if (template_info
+      && TI_TEMPLATE (template_info)
+      && !currently_open_class (context))
+    append_type_to_template_for_access_check (cs, typedef_decl,
+                                              context, location);
+}
+
/* DECL was the declaration to which a qualified-id resolved. Issue
an error message if it is not accessible. If OBJECT_TYPE is
non-NULL, we have just seen `x->' or `x.' and OBJECT_TYPE is the
add it to a list tied to the template.
At template instantiation time, that list will be walked and
access check performed. */
- if (is_typedef_decl (decl))
- {
- /* This the scope through which type_decl is accessed.
- It will be useful information later to do access check for
- type_decl usage. */
- tree scope = nested_name_specifier
- ? nested_name_specifier
- : DECL_CONTEXT (decl);
- tree templ_info = NULL;
- tree cs = current_scope ();
-
- if (cs && (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL))
- templ_info = get_template_info (cs);
-
- if (templ_info
- && TI_TEMPLATE (templ_info)
- && scope
- && CLASS_TYPE_P (scope)
- && !currently_open_class (scope))
- append_type_to_template_for_access_check (current_scope (), decl, scope);
- }
+ add_typedef_to_current_template_for_access_check (decl,
+ nested_name_specifier
+ ? nested_name_specifier
+ : DECL_CONTEXT (decl),
+ input_location);
/* If we're not checking, return immediately. */
if (deferred_access_no_check)
return t;
}
+/* Return TRUE iff EXPR_STMT is an empty list of
+   expression statements.  */
+
+bool
+empty_expr_stmt_p (tree expr_stmt)
+{
+  tree body = NULL_TREE;
+
+  /* void_zero_node is what an empty statement is represented by.  */
+  if (expr_stmt == void_zero_node)
+    return true;
+
+  /* Dig out the statement list from an EXPR_STMT wrapper, if any.  */
+  if (expr_stmt)
+    {
+      if (TREE_CODE (expr_stmt) == EXPR_STMT)
+        body = EXPR_STMT_EXPR (expr_stmt);
+      else if (TREE_CODE (expr_stmt) == STATEMENT_LIST)
+        body = expr_stmt;
+    }
+
+  if (body)
+    {
+      if (TREE_CODE (body) == STATEMENT_LIST)
+        /* An empty statement list has no elements to iterate over.  */
+        return tsi_end_p (tsi_start (body));
+      else
+        return empty_expr_stmt_p (body);
+    }
+  return false;
+}
+
/* Perform Koenig lookup. FN is the postfix-expression representing
the function (or functions) to call; ARGS are the arguments to the
call. Returns the functions to be considered by overload
tree identifier = NULL_TREE;
tree functions = NULL_TREE;
tree tmpl_args = NULL_TREE;
+ bool template_id = false;
if (TREE_CODE (fn) == TEMPLATE_ID_EXPR)
{
+ /* Use a separate flag to handle null args. */
+ template_id = true;
tmpl_args = TREE_OPERAND (fn, 1);
fn = TREE_OPERAND (fn, 0);
}
fn = unqualified_fn_lookup_error (identifier);
}
- if (fn && tmpl_args)
- fn = build_nt (TEMPLATE_ID_EXPR, fn, tmpl_args);
+ if (fn && template_id)
+ fn = build2 (TEMPLATE_ID_EXPR, unknown_type_node, fn, tmpl_args);
return fn;
}
if (current_class_ptr)
{
- result = current_class_ptr;
+ tree type = TREE_TYPE (current_class_ref);
+
+ /* In a lambda expression, 'this' refers to the captured 'this'. */
+ if (LAMBDA_TYPE_P (type))
+ result = lambda_expr_this_capture (CLASSTYPE_LAMBDA_EXPR (type));
+ else
+ result = current_class_ptr;
+
}
else if (current_function_decl
&& DECL_STATIC_FUNCTION_P (current_function_decl))
tree decl = create_temporary_var (type);
DECL_INITIAL (decl) = compound_literal;
TREE_STATIC (decl) = 1;
+ cp_apply_type_quals_to_decl (cp_type_quals (type), decl);
decl = pushdecl_top_level (decl);
DECL_NAME (decl) = make_anon_name ();
SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl));
error ("definition of %q#T inside template parameter list", t);
return error_mark_node;
}
+
+ /* According to the C++ ABI, decimal classes defined in ISO/IEC TR 24733
+ are passed the same as decimal scalar types. */
+ if (TREE_CODE (t) == RECORD_TYPE
+ && !processing_template_decl)
+ {
+ tree ns = TYPE_CONTEXT (t);
+ if (ns && TREE_CODE (ns) == NAMESPACE_DECL
+ && DECL_CONTEXT (ns) == std_node
+ && DECL_NAME (ns)
+ && !strcmp (IDENTIFIER_POINTER (DECL_NAME (ns)), "decimal"))
+ {
+ const char *n = TYPE_NAME_STRING (t);
+ if ((strcmp (n, "decimal32") == 0)
+ || (strcmp (n, "decimal64") == 0)
+ || (strcmp (n, "decimal128") == 0))
+ TYPE_TRANSPARENT_AGGR (t) = 1;
+ }
+ }
+
/* A non-implicit typename comes from code like:
template <typename T> struct A {
pushtag (make_anon_name (), t, /*tag_scope=*/ts_current);
}
- /* Update the location of the decl. */
- DECL_SOURCE_LOCATION (TYPE_NAME (t)) = input_location;
-
if (TYPE_BEING_DEFINED (t))
{
t = make_class_type (TREE_CODE (t));
else if (TYPE_P (scope))
{
if (!COMPLETE_TYPE_P (scope))
- error ("%Hincomplete type %qT used in nested name specifier",
- &location, scope);
+ error_at (location, "incomplete type %qT used in nested name specifier",
+ scope);
else if (TREE_CODE (decl) == TREE_LIST)
{
- error ("%Hreference to %<%T::%D%> is ambiguous", &location, scope, name);
+ error_at (location, "reference to %<%T::%D%> is ambiguous",
+ scope, name);
print_candidates (decl);
}
else
- error ("%H%qD is not a member of %qT", &location, name, scope);
+ error_at (location, "%qD is not a member of %qT", name, scope);
}
else if (scope != global_namespace)
- error ("%H%qD is not a member of %qD", &location, name, scope);
+ error_at (location, "%qD is not a member of %qD", name, scope);
else
- error ("%H%<::%D%> has not been declared", &location, name);
+ error_at (location, "%<::%D%> has not been declared", name);
}
/* If FNS is a member function, a set of member functions, or a
return build_baselink (cl, cl, fns, /*optype=*/NULL_TREE);
}
+/* Returns true iff DECL is an automatic variable from a function outside
+   the current one.  */
+
+static bool
+outer_automatic_var_p (tree decl)
+{
+  return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
+          /* Only function-scope, non-static variables count: a static
+             local has storage independent of any activation of its
+             enclosing function.  */
+          && DECL_FUNCTION_SCOPE_P (decl)
+          && !TREE_STATIC (decl)
+          && DECL_CONTEXT (decl) != current_function_decl);
+}
+
+/* Returns true iff DECL is a capture field from a lambda that is not our
+   immediate context.  */
+
+static bool
+outer_lambda_capture_p (tree decl)
+{
+  return (TREE_CODE (decl) == FIELD_DECL
+          && LAMBDA_TYPE_P (DECL_CONTEXT (decl))
+          /* The capture is "outer" unless the current class is (or is
+             derived from) the closure type owning the field.  */
+          && (!current_class_type
+              || !DERIVED_FROM_P (DECL_CONTEXT (decl), current_class_type)));
+}
+
/* ID_EXPRESSION is a representation of parsed, but unprocessed,
id-expression. (See cp_parser_id_expression for details.) SCOPE,
if non-NULL, is the type or namespace used to explicitly qualify
if (!scope && decl != error_mark_node)
maybe_note_name_used_in_class (id_expression, decl);
- /* Disallow uses of local variables from containing functions. */
- if (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL)
+ /* Disallow uses of local variables from containing functions, except
+ within lambda-expressions. */
+ if ((outer_automatic_var_p (decl)
+ || outer_lambda_capture_p (decl))
+ /* It's not a use (3.2) if we're in an unevaluated context. */
+ && !cp_unevaluated_operand)
{
- tree context = decl_function_context (decl);
- if (context != NULL_TREE && context != current_function_decl
- && ! TREE_STATIC (decl))
+ tree context = DECL_CONTEXT (decl);
+ tree containing_function = current_function_decl;
+ tree lambda_stack = NULL_TREE;
+ tree lambda_expr = NULL_TREE;
+ tree initializer = decl;
+
+ /* Core issue 696: "[At the July 2009 meeting] the CWG expressed
+ support for an approach in which a reference to a local
+ [constant] automatic variable in a nested class or lambda body
+ would enter the expression as an rvalue, which would reduce
+ the complexity of the problem"
+
+ FIXME update for final resolution of core issue 696. */
+ if (DECL_INTEGRAL_CONSTANT_VAR_P (decl))
+ return integral_constant_value (decl);
+
+ if (TYPE_P (context))
+ {
+ /* Implicit capture of an explicit capture. */
+ context = lambda_function (context);
+ initializer = thisify_lambda_field (decl);
+ }
+
+ /* If we are in a lambda function, we can move out until we hit
+ 1. the context,
+ 2. a non-lambda function, or
+ 3. a non-default capturing lambda function. */
+ while (context != containing_function
+ && LAMBDA_FUNCTION_P (containing_function))
+ {
+ lambda_expr = CLASSTYPE_LAMBDA_EXPR
+ (DECL_CONTEXT (containing_function));
+
+ if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr)
+ == CPLD_NONE)
+ break;
+
+ lambda_stack = tree_cons (NULL_TREE,
+ lambda_expr,
+ lambda_stack);
+
+ containing_function
+ = decl_function_context (containing_function);
+ }
+
+ if (context == containing_function)
+ {
+ decl = add_default_capture (lambda_stack,
+ /*id=*/DECL_NAME (decl),
+ initializer);
+ }
+ else if (lambda_expr)
+ {
+ error ("%qD is not captured", decl);
+ return error_mark_node;
+ }
+ else
{
error (TREE_CODE (decl) == VAR_DECL
? "use of %<auto%> variable from containing function"
style = arg;
}
- call_expr = build_call_array (TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
- fn,
- aggr_init_expr_nargs (aggr_init_expr),
- AGGR_INIT_EXPR_ARGP (aggr_init_expr));
+ call_expr = build_call_array_loc (input_location,
+ TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))),
+ fn,
+ aggr_init_expr_nargs (aggr_init_expr),
+ AGGR_INIT_EXPR_ARGP (aggr_init_expr));
if (style == ctor)
{
is so that you can know statically the entire set of thunks that
will ever be needed for a given virtual function, thereby
enabling you to output all the thunks with the function itself. */
- if (DECL_VIRTUAL_P (fn))
+ if (DECL_VIRTUAL_P (fn)
+ /* Do not emit thunks for extern template instantiations. */
+ && ! DECL_REALLY_EXTERN (fn))
{
tree thunk;
/* Generate RTL for FN. */
-void
-expand_or_defer_fn (tree fn)
+bool
+expand_or_defer_fn_1 (tree fn)
{
/* When the parser calls us after finishing the body of a template
function, we don't really want to expand the body. */
is not a GC root. */
if (!function_depth)
ggc_collect ();
- return;
+ return false;
}
- gcc_assert (gimple_body (fn));
+ gcc_assert (DECL_SAVED_TREE (fn));
/* If this is a constructor or destructor body, we have to clone
it. */
/* We don't want to process FN again, so pretend we've written
it out, even though we haven't. */
TREE_ASM_WRITTEN (fn) = 1;
- return;
+ DECL_SAVED_TREE (fn) = NULL_TREE;
+ return false;
}
/* We make a decision about linkage for these functions at the end
/* There's no reason to do any of the work here if we're only doing
semantic analysis; this code just generates RTL. */
if (flag_syntax_only)
- return;
+ return false;
- function_depth++;
+ return true;
+}
- /* Expand or defer, at the whim of the compilation unit manager. */
- cgraph_finalize_function (fn, function_depth > 1);
+void
+expand_or_defer_fn (tree fn)
+{
+ if (expand_or_defer_fn_1 (fn))
+ {
+ function_depth++;
- function_depth--;
+ /* Expand or defer, at the whim of the compilation unit manager. */
+ cgraph_finalize_function (fn, function_depth > 1);
+ emit_associated_thunks (fn);
+
+ function_depth--;
+ }
}
struct nrv_data
else
init = build_empty_stmt (EXPR_LOCATION (*tp));
DECL_INITIAL (dp->var) = NULL_TREE;
- SET_EXPR_LOCUS (init, EXPR_LOCUS (*tp));
+ SET_EXPR_LOCATION (init, EXPR_LOCATION (*tp));
*tp = init;
}
/* And replace all uses of the NRV with the RESULT_DECL. */
/* Make sure that DECL_DISCRIMINATOR_P continues to be true
after the allocation of the lang_decl structure. */
if (DECL_DISCRIMINATOR_P (v))
- DECL_LANG_SPECIFIC (v)->decl_flags.u2sel = 1;
+ DECL_LANG_SPECIFIC (v)->u.base.u2sel = 1;
}
if (! DECL_THREAD_LOCAL_P (v))
}
if (cond == error_mark_node)
{
- error ("%Hinvalid controlling predicate", &elocus);
+ error_at (elocus, "invalid controlling predicate");
return true;
}
diff = build_x_binary_op (MINUS_EXPR, TREE_OPERAND (cond, 1),
return true;
if (TREE_CODE (TREE_TYPE (diff)) != INTEGER_TYPE)
{
- error ("%Hdifference between %qE and %qD does not have integer type",
- &elocus, TREE_OPERAND (cond, 1), iter);
+ error_at (elocus, "difference between %qE and %qD does not have integer type",
+ TREE_OPERAND (cond, 1), iter);
return true;
}
if (incr == error_mark_node)
{
- error ("%Hinvalid increment expression", &elocus);
+ error_at (elocus, "invalid increment expression");
return true;
}
if (decl == NULL)
{
- error ("%Hexpected iteration declaration or initialization",
- &locus);
+ error_at (locus,
+ "expected iteration declaration or initialization");
return NULL;
}
}
if (cond == NULL)
{
- error ("%Hmissing controlling predicate", &elocus);
+ error_at (elocus, "missing controlling predicate");
return NULL;
}
if (incr == NULL)
{
- error ("%Hmissing increment expression", &elocus);
+ error_at (elocus, "missing increment expression");
return NULL;
}
if (!DECL_P (decl))
{
- error ("%Hexpected iteration declaration or initialization",
- &elocus);
+ error_at (elocus, "expected iteration declaration or initialization");
return NULL;
}
if (!INTEGRAL_TYPE_P (TREE_TYPE (decl))
&& TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE)
{
- error ("%Hinvalid type for iteration variable %qE", &elocus, decl);
+ error_at (elocus, "invalid type for iteration variable %qE", decl);
return NULL;
}
}
}
\f
-/* Returns decltype((EXPR)) for cases where we can drop the decltype and
- just return the type even though EXPR is a type-dependent expression.
- The ABI specifies which cases this applies to, which is a subset of the
- possible cases. */
+/* Returns the type of EXPR for cases where we can determine it even though
+ EXPR is a type-dependent expression. */
tree
describable_type (tree expr)
{
tree type = NULL_TREE;
- /* processing_template_decl isn't set when we're called from the mangling
- code, so bump it now. */
- ++processing_template_decl;
if (! type_dependent_expression_p (expr)
&& ! type_unknown_p (expr))
{
- type = TREE_TYPE (expr);
+ type = unlowered_expr_type (expr);
if (real_lvalue_p (expr))
type = build_reference_type (type);
}
- --processing_template_decl;
if (type)
return type;
case PARM_DECL:
case RESULT_DECL:
case FUNCTION_DECL:
- /* Named rvalue reference becomes lvalue. */
- type = build_reference_type (non_reference (TREE_TYPE (expr)));
+ return TREE_TYPE (expr);
break;
case NEW_EXPR:
return error_mark_node;
}
- if (type_dependent_expression_p (expr))
+ if (type_dependent_expression_p (expr)
+ /* In a template, a COMPONENT_REF has an IDENTIFIER_NODE for op1 even
+ if it isn't dependent, so that we can check access control at
+ instantiation time, so defer the decltype as well (PR 42277). */
+ || (id_expression_or_member_access_p
+ && processing_template_decl
+ && TREE_CODE (expr) == COMPONENT_REF))
{
if (id_expression_or_member_access_p)
{
if (type && !type_uses_auto (type))
return type;
+ treat_as_dependent:
type = cxx_make_type (DECLTYPE_TYPE);
DECLTYPE_TYPE_EXPR (type) = expr;
DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (type)
/* The type denoted by decltype(e) is defined as follows: */
+ expr = resolve_nondeduced_context (expr);
if (id_expression_or_member_access_p)
{
/* If e is an id-expression or a class member access (5.2.5
/* See through BASELINK nodes to the underlying functions. */
expr = BASELINK_FUNCTIONS (expr);
+ if (TREE_CODE (expr) == TEMPLATE_ID_EXPR)
+ expr = TREE_OPERAND (expr, 0);
+
if (TREE_CODE (expr) == OVERLOAD)
{
- if (OVL_CHAIN (expr))
+ if (OVL_CHAIN (expr)
+ || TREE_CODE (OVL_FUNCTION (expr)) == TEMPLATE_DECL)
{
error ("%qE refers to a set of overloaded functions", orig_expr);
return error_mark_node;
&& (TREE_CODE (TREE_TYPE (target_type)) == FUNCTION_TYPE
|| TREE_CODE (TREE_TYPE (target_type)) == METHOD_TYPE))
type = TREE_TYPE (TREE_TYPE (target_type));
+ else if (processing_template_decl)
+ /* Within a template finish_call_expr doesn't resolve
+ CALL_EXPR_FN, so even though this decltype isn't really
+ dependent let's defer resolving it. */
+ goto treat_as_dependent;
else
sorry ("unable to determine the declared type of expression %<%E%>",
expr);
if (real_lvalue_p (expr))
type = build_reference_type (type);
}
+ /* Within a lambda-expression:
+
+ Every occurrence of decltype((x)) where x is a possibly
+ parenthesized id-expression that names an entity of
+ automatic storage duration is treated as if x were
+ transformed into an access to a corresponding data member
+ of the closure type that would have been declared if x
+ were a use of the denoted entity. */
+ else if (outer_automatic_var_p (expr)
+ && current_function_decl
+ && LAMBDA_FUNCTION_P (current_function_decl))
+ type = capture_decltype (expr);
else
{
/* Otherwise, where T is the type of e, if e is an lvalue,
it now. */
if (CLASSTYPE_LAZY_COPY_CTOR (type))
lazily_declare_fn (sfk_copy_constructor, type);
+ if (CLASSTYPE_LAZY_MOVE_CTOR (type))
+ lazily_declare_fn (sfk_move_constructor, type);
fns = CLASSTYPE_CONSTRUCTORS (type);
}
else
switch (kind)
{
case CPTK_HAS_NOTHROW_ASSIGN:
+ type1 = strip_array_types (type1);
return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
&& (trait_expr_value (CPTK_HAS_TRIVIAL_ASSIGN, type1, type2)
|| (CLASS_TYPE_P (type1)
true))));
case CPTK_HAS_TRIVIAL_ASSIGN:
+ /* ??? The standard seems to be missing the "or array of such a class
+ type" wording for this trait. */
+ type1 = strip_array_types (type1);
return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE
- && (pod_type_p (type1)
+ && (trivial_type_p (type1)
|| (CLASS_TYPE_P (type1)
&& TYPE_HAS_TRIVIAL_ASSIGN_REF (type1))));
case CPTK_HAS_TRIVIAL_CONSTRUCTOR:
type1 = strip_array_types (type1);
- return (pod_type_p (type1)
+ return (trivial_type_p (type1)
|| (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_DFLT (type1)));
case CPTK_HAS_NOTHROW_COPY:
+ type1 = strip_array_types (type1);
return (trait_expr_value (CPTK_HAS_TRIVIAL_COPY, type1, type2)
|| (CLASS_TYPE_P (type1)
&& classtype_has_nothrow_assign_or_copy_p (type1, false)));
case CPTK_HAS_TRIVIAL_COPY:
- return (pod_type_p (type1) || type_code1 == REFERENCE_TYPE
+ /* ??? The standard seems to be missing the "or array of such a class
+ type" wording for this trait. */
+ type1 = strip_array_types (type1);
+ return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
|| (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_INIT_REF (type1)));
case CPTK_HAS_TRIVIAL_DESTRUCTOR:
type1 = strip_array_types (type1);
- return (pod_type_p (type1) || type_code1 == REFERENCE_TYPE
+ return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE
|| (CLASS_TYPE_P (type1)
&& TYPE_HAS_TRIVIAL_DESTRUCTOR (type1)));
case CPTK_IS_POLYMORPHIC:
return (CLASS_TYPE_P (type1) && TYPE_POLYMORPHIC_P (type1));
+ case CPTK_IS_STD_LAYOUT:
+ return (std_layout_type_p (type1));
+
+ case CPTK_IS_TRIVIAL:
+ return (trivial_type_p (type1));
+
case CPTK_IS_UNION:
return (type_code1 == UNION_TYPE);
|| kind == CPTK_IS_ENUM
|| kind == CPTK_IS_POD
|| kind == CPTK_IS_POLYMORPHIC
+ || kind == CPTK_IS_STD_LAYOUT
+ || kind == CPTK_IS_TRIVIAL
|| kind == CPTK_IS_UNION);
if (kind == CPTK_IS_CONVERTIBLE_TO)
case CPTK_IS_EMPTY:
case CPTK_IS_POD:
case CPTK_IS_POLYMORPHIC:
+ case CPTK_IS_STD_LAYOUT:
+ case CPTK_IS_TRIVIAL:
if (!check_trait_type (type1))
{
error ("incomplete type %qT not allowed", type1);
return 0;
}
+/* Return true if T is a literal type.  */
+
+bool
+literal_type_p (tree t)
+{
+  /* All scalar types are literal.  */
+  if (SCALAR_TYPE_P (t))
+    return true;
+  /* A class type is literal iff it was marked so when defined.  */
+  if (CLASS_TYPE_P (t))
+    return CLASSTYPE_LITERAL_P (t);
+  /* An array is literal iff its element type is.  */
+  if (TREE_CODE (t) == ARRAY_TYPE)
+    return literal_type_p (strip_array_types (t));
+  return false;
+}
+
+
+/* If DECL is a variable declared `constexpr', require its type
+   be literal.  Return the DECL if OK, otherwise NULL.  */
+
+tree
+ensure_literal_type_for_constexpr_object (tree decl)
+{
+  tree type = TREE_TYPE (decl);
+  /* In a template the type may still be dependent, so the check is
+     skipped until instantiation.  */
+  if (TREE_CODE (decl) == VAR_DECL && DECL_DECLARED_CONSTEXPR_P (decl)
+      && !processing_template_decl && !literal_type_p (type))
+    {
+      error ("the type %qT of constexpr variable %qD is not literal",
+             type, decl);
+      return NULL;
+    }
+  return decl;
+}
+
+/* Return non-null if FUN certainly designates a valid constexpr function
+   declaration.  Otherwise return NULL.  Issue appropriate diagnostics
+   if necessary.  Note that we only check the declaration, not the body
+   of the function.  */
+
+tree
+validate_constexpr_fundecl (tree fun)
+{
+  tree rettype = NULL;
+  tree parm = NULL;
+
+  /* Don't bother if FUN is not marked constexpr.  */
+  if (!DECL_DECLARED_CONSTEXPR_P (fun))
+    return NULL;
+
+  /* For a function template, we have absolutely no guarantee that all
+     instantiations will be constexpr.  */
+  if (TREE_CODE (fun) == TEMPLATE_DECL)
+    return NULL;
+
+  /* Every parameter must be of literal type; a dependent type might
+     still become literal after instantiation, so don't diagnose it.  */
+  parm = FUNCTION_FIRST_USER_PARM (fun);
+  for (; parm != NULL; parm = TREE_CHAIN (parm))
+    {
+      tree type = TREE_TYPE (parm);
+      if (dependent_type_p (type))
+        return NULL;
+      if (!literal_type_p (type))
+        {
+          error ("parameter %q#D is not of literal type", parm);
+          return NULL;
+        }
+    }
+
+  /* Constructors have no return type to validate.  */
+  if (DECL_CONSTRUCTOR_P (fun))
+    return fun;
+
+  rettype = TREE_TYPE (TREE_TYPE (fun));
+  if (dependent_type_p (rettype))
+    return NULL;
+  if (!literal_type_p (rettype))
+    {
+      error ("return type %qT of function %qD is not a literal type",
+             rettype, fun);
+      return NULL;
+    }
+  return fun;
+}
+
+
+/* Constructor for a lambda expression.  All capture-related fields
+   start out empty.  */
+
+tree
+build_lambda_expr (void)
+{
+  tree lambda = make_node (LAMBDA_EXPR);
+  LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE;
+  LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE;
+  LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE;
+  LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE;
+  LAMBDA_EXPR_MUTABLE_P (lambda) = false;
+  return lambda;
+}
+
+/* Create the closure object for a LAMBDA_EXPR.  */
+
+tree
+build_lambda_object (tree lambda_expr)
+{
+  /* Build aggregate constructor call.
+      - cp_parser_braced_list
+      - cp_parser_functional_cast */
+  VEC(constructor_elt,gc) *elts = NULL;
+  tree node, expr, type;
+  location_t saved_loc;
+
+  /* Inside a template we cannot build the object yet; just return the
+     LAMBDA_EXPR itself.  */
+  if (processing_template_decl)
+    return lambda_expr;
+
+  /* Make sure any error messages refer to the lambda-introducer.  */
+  saved_loc = input_location;
+  input_location = LAMBDA_EXPR_LOCATION (lambda_expr);
+
+  /* Build the initializer list, one element per capture.  */
+  for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr);
+       node;
+       node = TREE_CHAIN (node))
+    {
+      tree field = TREE_PURPOSE (node);
+      tree val = TREE_VALUE (node);
+
+      if (DECL_P (val))
+        mark_used (val);
+
+      /* Mere mortals can't copy arrays with aggregate initialization, so
+         do some magic to make it work here.  */
+      if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE)
+        val = build_array_copy (val);
+      else if (DECL_NORMAL_CAPTURE_P (field)
+               && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE)
+        {
+          /* "the entities that are captured by copy are used to
+             direct-initialize each corresponding non-static data
+             member of the resulting closure object."
+
+             There's normally no way to express direct-initialization
+             from an element of a CONSTRUCTOR, so we build up a special
+             TARGET_EXPR to bypass the usual copy-initialization.  */
+          val = force_rvalue (val);
+          if (TREE_CODE (val) == TARGET_EXPR)
+            TARGET_EXPR_DIRECT_INIT_P (val) = true;
+        }
+
+      CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val);
+    }
+
+  expr = build_constructor (init_list_type_node, elts);
+  CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1;
+
+  /* N2927: "[The closure] class type is not an aggregate."
+     But we briefly treat it as an aggregate to make this simpler.  */
+  type = TREE_TYPE (lambda_expr);
+  CLASSTYPE_NON_AGGREGATE (type) = 0;
+  expr = finish_compound_literal (type, expr);
+  CLASSTYPE_NON_AGGREGATE (type) = 1;
+
+  input_location = saved_loc;
+  return expr;
+}
+
+/* Return an initialized RECORD_TYPE for LAMBDA.
+   LAMBDA must have its explicit captures already.  The class is opened
+   with begin_class_definition; the caller is responsible for finishing
+   it.  */
+
+tree
+begin_lambda_type (tree lambda)
+{
+  tree type;
+
+  {
+    /* Unique name.  This is just like an unnamed class, but we cannot use
+       make_anon_name because of certain checks against TYPE_ANONYMOUS_P.  */
+    tree name;
+    name = make_lambda_name ();
+
+    /* Create the new RECORD_TYPE for this lambda.  */
+    type = xref_tag (/*tag_code=*/record_type,
+                     name,
+                     /*scope=*/ts_within_enclosing_non_class,
+                     /*template_header_p=*/false);
+  }
+
+  /* Designate it as a struct so that we can use aggregate initialization.  */
+  CLASSTYPE_DECLARED_CLASS (type) = false;
+
+  /* Clear base types.  */
+  xref_basetypes (type, /*bases=*/NULL_TREE);
+
+  /* Start the class.  */
+  type = begin_class_definition (type, /*attributes=*/NULL_TREE);
+
+  /* Cross-reference the expression and the type.  */
+  TREE_TYPE (lambda) = type;
+  CLASSTYPE_LAMBDA_EXPR (type) = lambda;
+
+  return type;
+}
+
+/* Returns the type to use for the return type of the operator() of a
+   closure class, deduced from the returned expression EXPR.  */
+
+tree
+lambda_return_type (tree expr)
+{
+  tree type;
+  if (type_dependent_expression_p (expr))
+    {
+      /* The type cannot be computed yet; build a DECLTYPE_TYPE to be
+         resolved at instantiation time.  */
+      type = cxx_make_type (DECLTYPE_TYPE);
+      DECLTYPE_TYPE_EXPR (type) = expr;
+      DECLTYPE_FOR_LAMBDA_RETURN (type) = true;
+      SET_TYPE_STRUCTURAL_EQUALITY (type);
+    }
+  else
+    /* The deduced type undergoes array/function-to-pointer decay.  */
+    type = type_decays_to (unlowered_expr_type (expr));
+  return type;
+}
+
+/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the
+   closure type, or NULL_TREE if it cannot be found without triggering
+   an instantiation.  */
+
+tree
+lambda_function (tree lambda)
+{
+  tree type;
+  if (TREE_CODE (lambda) == LAMBDA_EXPR)
+    type = TREE_TYPE (lambda);
+  else
+    type = lambda;
+  gcc_assert (LAMBDA_TYPE_P (type));
+  /* Don't let debug_tree cause instantiation.  */
+  if (CLASSTYPE_TEMPLATE_INSTANTIATION (type) && !COMPLETE_TYPE_P (type))
+    return NULL_TREE;
+  lambda = lookup_member (type, ansi_opname (CALL_EXPR),
+                          /*protect=*/0, /*want_type=*/false);
+  if (lambda)
+    lambda = BASELINK_FUNCTIONS (lambda);
+  return lambda;
+}
+
+/* Returns the type to use for the FIELD_DECL corresponding to the
+   capture of EXPR.
+   The caller should add REFERENCE_TYPE for capture by reference.  */
+
+tree
+lambda_capture_field_type (tree expr)
+{
+  tree type;
+  if (type_dependent_expression_p (expr))
+    {
+      /* The type cannot be computed yet; build a DECLTYPE_TYPE to be
+         resolved at instantiation time.  */
+      type = cxx_make_type (DECLTYPE_TYPE);
+      DECLTYPE_TYPE_EXPR (type) = expr;
+      DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true;
+      SET_TYPE_STRUCTURAL_EQUALITY (type);
+    }
+  else
+    /* A by-copy capture field always has non-reference type.  */
+    type = non_reference (unlowered_expr_type (expr));
+  return type;
+}
+
+/* Recompute the return type for LAMBDA with body of the form:
+     { return EXPR ; }
+   RETURN_TYPE is the deduced (or specified) return type.  */
+
+void
+apply_lambda_return_type (tree lambda, tree return_type)
+{
+  tree fco = lambda_function (lambda);
+  tree result;
+
+  LAMBDA_EXPR_RETURN_TYPE (lambda) = return_type;
+
+  /* If we got a DECLTYPE_TYPE, don't stick it in the function yet,
+     it would interfere with instantiating the closure type.  */
+  if (dependent_type_p (return_type))
+    return;
+  if (return_type == error_mark_node)
+    return;
+
+  /* TREE_TYPE (FUNCTION_DECL) == METHOD_TYPE
+     TREE_TYPE (METHOD_TYPE) == return-type */
+  TREE_TYPE (fco) = change_return_type (return_type, TREE_TYPE (fco));
+
+  result = DECL_RESULT (fco);
+  if (result == NULL_TREE)
+    return;
+
+  /* We already have a DECL_RESULT from start_preparsed_function.
+     Now we need to redo the work it and allocate_struct_function
+     did to reflect the new type.  */
+  result = build_decl (input_location, RESULT_DECL, NULL_TREE,
+                       TYPE_MAIN_VARIANT (return_type));
+  DECL_ARTIFICIAL (result) = 1;
+  DECL_IGNORED_P (result) = 1;
+  cp_apply_type_quals_to_decl (cp_type_quals (return_type),
+                               result);
+
+  DECL_RESULT (fco) = result;
+
+  /* Redo the aggregate-return bookkeeping for the new result decl.  */
+  if (!processing_template_decl && aggregate_value_p (result, fco))
+    {
+#ifdef PCC_STATIC_STRUCT_RETURN
+      cfun->returns_pcc_struct = 1;
+#endif
+      cfun->returns_struct = 1;
+    }
+
+}
+
+/* DECL is a local variable or parameter from the surrounding scope of a
+   lambda-expression.  Returns the decltype for a use of the capture field
+   for DECL even if it hasn't been captured yet.  */
+
+static tree
+capture_decltype (tree decl)
+{
+  tree lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl));
+  /* FIXME do lookup instead of list walk? */
+  tree cap = value_member (decl, LAMBDA_EXPR_CAPTURE_LIST (lam));
+  tree type;
+
+  if (cap)
+    /* Already captured: use the type of the capture field.  */
+    type = TREE_TYPE (TREE_PURPOSE (cap));
+  else
+    /* Not captured yet: compute the type the capture field would get
+       under the default capture mode.  */
+    switch (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam))
+      {
+      case CPLD_NONE:
+        error ("%qD is not captured", decl);
+        return error_mark_node;
+
+      case CPLD_COPY:
+        type = TREE_TYPE (decl);
+        /* Capturing a non-function reference by copy captures the
+           referent.  */
+        if (TREE_CODE (type) == REFERENCE_TYPE
+            && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE)
+          type = TREE_TYPE (type);
+        break;
+
+      case CPLD_REFERENCE:
+        type = TREE_TYPE (decl);
+        if (TREE_CODE (type) != REFERENCE_TYPE)
+          type = build_reference_type (TREE_TYPE (decl));
+        break;
+
+      default:
+        gcc_unreachable ();
+      }
+
+  if (TREE_CODE (type) != REFERENCE_TYPE)
+    {
+      /* A by-copy capture is const unless the lambda is mutable, and a
+         use of the field is an lvalue, hence the reference type.  */
+      if (!LAMBDA_EXPR_MUTABLE_P (lam))
+        type = cp_build_qualified_type (type, (TYPE_QUALS (type)
+                                               |TYPE_QUAL_CONST));
+      type = build_reference_type (type);
+    }
+  return type;
+}
+
+/* From an ID and INITIALIZER, create a capture (by reference if
+   BY_REFERENCE_P is true), add it to the capture-list for LAMBDA,
+   and return it.  */
+
+tree
+add_capture (tree lambda, tree id, tree initializer, bool by_reference_p,
+	     bool explicit_init_p)
+{
+  tree type = lambda_capture_field_type (initializer);
+  tree member;
+
+  if (by_reference_p)
+    {
+      type = build_reference_type (type);
+      if (!real_lvalue_p (initializer))
+	error ("cannot capture %qE by reference", initializer);
+    }
+
+  /* Build the FIELD_DECL that will hold the capture.  */
+  member = build_lang_decl (FIELD_DECL, id, type);
+  if (!explicit_init_p)
+    /* Normal captures are invisible to name lookup but uses are replaced
+       with references to the capture field; we implement this by only
+       really making them invisible in unevaluated context; see
+       qualify_lookup.  For now, let's make explicitly initialized captures
+       always visible.  */
+    DECL_NORMAL_CAPTURE_P (member) = true;
+
+  if (id == get_identifier ("__this"))
+    {
+      if (LAMBDA_EXPR_CAPTURES_THIS_P (lambda))
+	error ("already captured %<this%> in lambda expression");
+      LAMBDA_EXPR_THIS_CAPTURE (lambda) = member;
+    }
+
+  /* Add it to the appropriate closure class if we've started it.  */
+  if (current_class_type != NULL_TREE
+      && current_class_type == TREE_TYPE (lambda))
+    finish_member_declaration (member);
+
+  LAMBDA_EXPR_CAPTURE_LIST (lambda)
+    = tree_cons (member, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda));
+
+  return member;
+}
+
+/* Register all the capture members on the list CAPTURES, which is the
+   LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer.  */
+
+void
+register_capture_members (tree captures)
+{
+  if (captures == NULL_TREE)
+    return;
+
+  /* Recurse to the tail first: the capture list is built up front-first,
+     so this registers the members in the reverse of list order.  */
+  register_capture_members (TREE_CHAIN (captures));
+  finish_member_declaration (TREE_PURPOSE (captures));
+}
+
+/* Given a FIELD_DECL decl belonging to a closure type, return a
+   COMPONENT_REF of it relative to the 'this' parameter of the op() for
+   that type.  */
+
+static tree
+thisify_lambda_field (tree decl)
+{
+  /* The call operator of the closure type that owns DECL.  */
+  tree callop = lambda_function (DECL_CONTEXT (decl));
+  /* Its first argument is the closure 'this'; dereference it to get the
+     closure object.  */
+  tree closure = cp_build_indirect_ref (DECL_ARGUMENTS (callop), RO_NULL,
+					tf_warning_or_error);
+  return finish_non_static_data_member (decl, closure,
+					/*qualifying_scope=*/NULL_TREE);
+}
+
+/* Similar to add_capture, except this works on a stack of nested lambdas.
+   BY_REFERENCE_P in this case is derived from the default capture mode.
+   Returns the capture for the lambda at the bottom of the stack.  */
+
+tree
+add_default_capture (tree lambda_stack, tree id, tree initializer)
+{
+  bool this_capture_p = (id == get_identifier ("__this"));
+  tree saved_class_type = current_class_type;
+  tree member = NULL_TREE;
+  tree node;
+
+  for (node = lambda_stack; node; node = TREE_CHAIN (node))
+    {
+      tree lambda = TREE_VALUE (node);
+      /* 'this' is always captured by value; everything else follows the
+	 lambda's capture-default.  */
+      bool by_reference_p
+	= (!this_capture_p
+	   && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda)
+	       == CPLD_REFERENCE));
+
+      /* add_capture needs current_class_type to be the closure type.  */
+      current_class_type = TREE_TYPE (lambda);
+      member = add_capture (lambda, id, initializer, by_reference_p,
+			    /*explicit_init_p=*/false);
+
+      /* The next lambda on the stack initializes its capture from the
+	 field we just added to this one.  */
+      initializer = thisify_lambda_field (member);
+    }
+
+  current_class_type = saved_class_type;
+
+  return member;
+}
+
+/* Return the capture pertaining to a use of 'this' in LAMBDA, in the form
+   of an INDIRECT_REF, possibly adding it through default capturing (walking
+   out through enclosing lambdas as needed).  Returns error_mark_node, with
+   a diagnostic, if 'this' is not and cannot be captured.  */
+
+tree
+lambda_expr_this_capture (tree lambda)
+{
+  tree result;
+
+  tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda);
+
+  /* Try to default capture 'this' if we can.  */
+  if (!this_capture
+      && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE)
+    {
+      tree containing_function = TYPE_CONTEXT (TREE_TYPE (lambda));
+      tree lambda_stack = tree_cons (NULL_TREE, lambda, NULL_TREE);
+      tree init = NULL_TREE;
+
+      /* If we are in a lambda function, we can move out until we hit:
+	   1. a non-lambda function,
+	   2. a lambda function capturing 'this', or
+	   3. a non-default capturing lambda function.  */
+      while (LAMBDA_FUNCTION_P (containing_function))
+	{
+	  /* TLAMBDA, not LAMBDA: don't shadow the parameter (which names
+	     the innermost lambda we are capturing for).  */
+	  tree tlambda
+	    = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function));
+
+	  if (LAMBDA_EXPR_THIS_CAPTURE (tlambda))
+	    {
+	      /* An outer lambda has already captured 'this'.  */
+	      tree cap = LAMBDA_EXPR_THIS_CAPTURE (tlambda);
+	      init = thisify_lambda_field (cap);
+	      break;
+	    }
+
+	  if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (tlambda) == CPLD_NONE)
+	    /* An outer lambda won't let us capture 'this'.  */
+	    break;
+
+	  lambda_stack = tree_cons (NULL_TREE,
+				    tlambda,
+				    lambda_stack);
+
+	  containing_function = decl_function_context (containing_function);
+	}
+
+      if (!init && DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function)
+	  && !LAMBDA_FUNCTION_P (containing_function))
+	/* First parameter is 'this'.  */
+	init = DECL_ARGUMENTS (containing_function);
+
+      if (init)
+	this_capture = add_default_capture (lambda_stack,
+					    /*id=*/get_identifier ("__this"),
+					    init);
+    }
+
+  if (!this_capture)
+    {
+      error ("%<this%> was not captured for this lambda function");
+      result = error_mark_node;
+    }
+  else
+    {
+      /* To make sure that current_class_ref is for the lambda.  */
+      gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref))
+		  == TREE_TYPE (lambda));
+
+      result = finish_non_static_data_member (this_capture,
+					      current_class_ref,
+					      /*qualifying_scope=*/NULL_TREE);
+
+      /* If 'this' is captured, each use of 'this' is transformed into an
+	 access to the corresponding unnamed data member of the closure
+	 type cast (_expr.cast_ 5.4) to the type of 'this'.  [ The cast
+	 ensures that the transformed expression is an rvalue. ]  */
+      result = rvalue (result);
+    }
+
+  return result;
+}
+
+/* If the closure TYPE should be convertible to function pointer (i.e. its
+   lambda has no captures), add to it a conversion operator returning a
+   pointer to a static "_FUN" thunk that forwards to the call operator, and
+   generate the bodies of both.  Otherwise do nothing.  */
+
+void
+maybe_add_lambda_conv_op (tree type)
+{
+  bool nested = (current_function_decl != NULL_TREE);
+  tree callop = lambda_function (type);
+  tree rettype, name, fntype, fn, body, compound_stmt;
+  tree thistype, stattype, statfn, convfn, call, arg;
+  VEC (tree, gc) *argvec;
+
+  /* Only a lambda with an empty capture list gets the conversion.  */
+  if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE)
+    return;
+
+  /* Type of the static thunk: the call op's return type and parameters,
+     apparently minus the implicit object parameter (cf. the TREE_CHAIN
+     skip when copying DECL_ARGUMENTS below).  */
+  stattype = build_function_type (TREE_TYPE (TREE_TYPE (callop)),
+				  FUNCTION_ARG_CHAIN (callop));
+
+  /* First build up the conversion op.  */
+
+  rettype = build_pointer_type (stattype);
+  name = mangle_conv_op_name_for_type (rettype);
+  /* The conversion op is a const member function of the closure.  */
+  thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST);
+  fntype = build_method_type_directly (thistype, rettype, void_list_node);
+  fn = convfn = build_lang_decl (FUNCTION_DECL, name, fntype);
+  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
+
+  /* NOTE(review): presumably keeps the low pointer bit free for the
+     pointer-to-member-function virtual bit on such targets — confirm.  */
+  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
+      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
+    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
+
+  SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR);
+  grokclassfn (type, fn, NO_SPECIAL);
+  set_linkage_according_to_type (type, fn);
+  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
+  DECL_IN_AGGR_P (fn) = 1;
+  DECL_ARTIFICIAL (fn) = 1;
+  DECL_NOT_REALLY_EXTERN (fn) = 1;
+  DECL_DECLARED_INLINE_P (fn) = 1;
+  DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST);
+  if (nested)
+    DECL_INTERFACE_KNOWN (fn) = 1;
+
+  add_method (type, fn, NULL_TREE);
+
+  /* Generic thunk code fails for varargs; we'll complain in mark_used if
+     the conversion op is used.  */
+  if (varargs_function_p (callop))
+    {
+      DECL_DELETED_FN (fn) = 1;
+      return;
+    }
+
+  /* Now build up the thunk to be returned.  */
+
+  name = get_identifier ("_FUN");
+  fn = statfn = build_lang_decl (FUNCTION_DECL, name, stattype);
+  DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop);
+  if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn
+      && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT)
+    DECL_ALIGN (fn) = 2 * BITS_PER_UNIT;
+  grokclassfn (type, fn, NO_SPECIAL);
+  set_linkage_according_to_type (type, fn);
+  rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof);
+  DECL_IN_AGGR_P (fn) = 1;
+  DECL_ARTIFICIAL (fn) = 1;
+  DECL_NOT_REALLY_EXTERN (fn) = 1;
+  DECL_DECLARED_INLINE_P (fn) = 1;
+  DECL_STATIC_FUNCTION_P (fn) = 1;
+  /* The thunk takes the call op's parameters, minus its first argument;
+     reparent the copies to the thunk.  */
+  DECL_ARGUMENTS (fn) = copy_list (TREE_CHAIN (DECL_ARGUMENTS (callop)));
+  for (arg = DECL_ARGUMENTS (fn); arg; arg = TREE_CHAIN (arg))
+    DECL_CONTEXT (arg) = fn;
+  if (nested)
+    DECL_INTERFACE_KNOWN (fn) = 1;
+
+  add_method (type, fn, NULL_TREE);
+
+  if (nested)
+    push_function_context ();
+
+  /* Generate the body of the thunk.  */
+
+  start_preparsed_function (statfn, NULL_TREE,
+			    SF_PRE_PARSED | SF_INCLASS_INLINE);
+  if (DECL_ONE_ONLY (statfn))
+    {
+      /* Put the thunk in the same comdat group as the call op.  */
+      struct cgraph_node *callop_node, *thunk_node;
+      DECL_COMDAT_GROUP (statfn) = DECL_COMDAT_GROUP (callop);
+      callop_node = cgraph_node (callop);
+      thunk_node = cgraph_node (statfn);
+      gcc_assert (callop_node->same_comdat_group == NULL);
+      gcc_assert (thunk_node->same_comdat_group == NULL);
+      callop_node->same_comdat_group = thunk_node;
+      thunk_node->same_comdat_group = callop_node;
+    }
+  body = begin_function_body ();
+  compound_stmt = begin_compound_stmt (0);
+
+  /* Pass a null 'this' to the call op — presumably safe since the closure
+     has no captures for the body to reference; CALL_FROM_THUNK_P below
+     marks the call accordingly.  */
+  arg = build1 (NOP_EXPR, TREE_TYPE (DECL_ARGUMENTS (callop)),
+		null_pointer_node);
+  argvec = make_tree_vector ();
+  VEC_quick_push (tree, argvec, arg);
+  /* Forward the thunk's own parameters after the dummy 'this'.  */
+  for (arg = DECL_ARGUMENTS (statfn); arg; arg = TREE_CHAIN (arg))
+    VEC_safe_push (tree, gc, argvec, arg);
+  call = build_cxx_call (callop, VEC_length (tree, argvec),
+			 VEC_address (tree, argvec));
+  CALL_FROM_THUNK_P (call) = 1;
+  finish_return_stmt (call);
+
+  finish_compound_stmt (compound_stmt);
+  finish_function_body (body);
+
+  expand_or_defer_fn (finish_function (2));
+
+  /* Generate the body of the conversion op: just return &_FUN.  */
+
+  start_preparsed_function (convfn, NULL_TREE,
+			    SF_PRE_PARSED | SF_INCLASS_INLINE);
+  body = begin_function_body ();
+  compound_stmt = begin_compound_stmt (0);
+
+  finish_return_stmt (decay_conversion (statfn));
+
+  finish_compound_stmt (compound_stmt);
+  finish_function_body (body);
+
+  expand_or_defer_fn (finish_function (2));
+
+  if (nested)
+    pop_function_context ();
+}
#include "gt-cp-semantics.h"