X-Git-Url: http://git.sourceforge.jp/view?p=pf3gnuchains%2Fgcc-fork.git;a=blobdiff_plain;f=gcc%2Fcp%2Fsemantics.c;h=7bcd75690fc61234440bbedbdbb36f80e886bcd5;hp=b37b322746a7bcaba2fd3ac3545150afca054e55;hb=c0ba4ba865de0db4603365be06282f3848800d4c;hpb=aed4eb5dbf4e314cd58c8ab4aa7115029d5415e0 diff --git a/gcc/cp/semantics.c b/gcc/cp/semantics.c index b37b322746a..7bcd75690fc 100644 --- a/gcc/cp/semantics.c +++ b/gcc/cp/semantics.c @@ -55,6 +55,8 @@ along with GCC; see the file COPYING3. If not see static tree maybe_convert_cond (tree); static tree finalize_nrv_r (tree *, int *, void *); +static tree capture_decltype (tree); +static tree thisify_lambda_field (tree); /* Deferred Access Checking Overview @@ -420,7 +422,7 @@ maybe_cleanup_point_expr_void (tree expr) void add_decl_expr (tree decl) { - tree r = build_stmt (DECL_EXPR, decl); + tree r = build_stmt (input_location, DECL_EXPR, decl); if (DECL_INITIAL (decl) || (DECL_SIZE (decl) && TREE_SIDE_EFFECTS (DECL_SIZE (decl)))) r = maybe_cleanup_point_expr_void (r); @@ -441,7 +443,7 @@ do_poplevel (tree stmt_list) if (!processing_template_decl) { - stmt_list = c_build_bind_expr (block, stmt_list); + stmt_list = c_build_bind_expr (input_location, block, stmt_list); /* ??? See c_end_compound_stmt re statement expressions. */ } @@ -466,7 +468,7 @@ do_pushlevel (scope_kind sk) void push_cleanup (tree decl, tree cleanup, bool eh_only) { - tree stmt = build_stmt (CLEANUP_STMT, NULL, cleanup, decl); + tree stmt = build_stmt (input_location, CLEANUP_STMT, NULL, cleanup, decl); CLEANUP_EH_ONLY (stmt) = eh_only; add_stmt (stmt); CLEANUP_BODY (stmt) = push_stmt_list (); @@ -544,10 +546,8 @@ finish_goto_stmt (tree destination) TREE_USED (destination) = 1; else { - /* The DESTINATION is being used as an rvalue. */ if (!processing_template_decl) { - destination = decay_conversion (destination); destination = cp_convert (ptr_type_node, destination); if (error_operand_p (destination)) return NULL_TREE; @@ -561,7 +561,7 @@ finish_goto_stmt (tree destination) check_goto (destination); - return add_stmt (build_stmt (GOTO_EXPR, destination)); + return add_stmt (build_stmt (input_location, GOTO_EXPR, destination)); } /* COND is the condition-expression for an if, while, etc., @@ -624,7 +624,7 @@ finish_expr_stmt (tree expr) if (TREE_CODE (expr) != CLEANUP_POINT_EXPR) { if (TREE_CODE (expr) != EXPR_STMT) - expr = build_stmt (EXPR_STMT, expr); + expr = build_stmt (input_location, EXPR_STMT, expr); expr = maybe_cleanup_point_expr_void (expr); } @@ -645,7 +645,7 @@ begin_if_stmt (void) { tree r, scope; scope = do_pushlevel (sk_block); - r = build_stmt (IF_STMT, NULL_TREE, NULL_TREE, NULL_TREE); + r = build_stmt (input_location, IF_STMT, NULL_TREE, NULL_TREE, NULL_TREE); TREE_CHAIN (r) = scope; begin_cond (&IF_COND (r)); return r; @@ -707,7 +707,7 @@ tree begin_while_stmt (void) { tree r; - r = build_stmt (WHILE_STMT, NULL_TREE, NULL_TREE); + r = build_stmt (input_location, WHILE_STMT, NULL_TREE, NULL_TREE); add_stmt (r); WHILE_BODY (r) = do_pushlevel (sk_block); begin_cond (&WHILE_COND (r)); @@ -739,7 +739,7 @@ finish_while_stmt (tree while_stmt) tree begin_do_stmt (void) { - tree r = build_stmt (DO_STMT, NULL_TREE, NULL_TREE); + tree r = build_stmt (input_location, DO_STMT, NULL_TREE, NULL_TREE); add_stmt (r); DO_BODY (r) = push_stmt_list (); return r; @@ -801,7 +801,7 @@ finish_return_stmt (tree expr) } } - r = build_stmt (RETURN_EXPR, expr); + r = build_stmt (input_location, RETURN_EXPR, expr); TREE_NO_WARNING (r) |= no_warning; r = maybe_cleanup_point_expr_void (r); 
r = add_stmt (r); @@ -817,7 +817,7 @@ begin_for_stmt (void) { tree r; - r = build_stmt (FOR_STMT, NULL_TREE, NULL_TREE, + r = build_stmt (input_location, FOR_STMT, NULL_TREE, NULL_TREE, NULL_TREE, NULL_TREE); if (flag_new_for_scope > 0) @@ -908,7 +908,7 @@ finish_for_stmt (tree for_stmt) tree finish_break_stmt (void) { - return add_stmt (build_stmt (BREAK_STMT)); + return add_stmt (build_stmt (input_location, BREAK_STMT)); } /* Finish a continue-statement. */ @@ -916,7 +916,7 @@ finish_break_stmt (void) tree finish_continue_stmt (void) { - return add_stmt (build_stmt (CONTINUE_STMT)); + return add_stmt (build_stmt (input_location, CONTINUE_STMT)); } /* Begin a switch-statement. Returns a new SWITCH_STMT if @@ -927,7 +927,7 @@ begin_switch_stmt (void) { tree r, scope; - r = build_stmt (SWITCH_STMT, NULL_TREE, NULL_TREE, NULL_TREE); + r = build_stmt (input_location, SWITCH_STMT, NULL_TREE, NULL_TREE, NULL_TREE); scope = do_pushlevel (sk_block); TREE_CHAIN (r) = scope; @@ -997,7 +997,7 @@ finish_switch_stmt (tree switch_stmt) tree begin_try_block (void) { - tree r = build_stmt (TRY_BLOCK, NULL_TREE, NULL_TREE); + tree r = build_stmt (input_location, TRY_BLOCK, NULL_TREE, NULL_TREE); add_stmt (r); TRY_STMTS (r) = push_stmt_list (); return r; @@ -1087,7 +1087,7 @@ begin_handler (void) { tree r; - r = build_stmt (HANDLER, NULL_TREE, NULL_TREE); + r = build_stmt (input_location, HANDLER, NULL_TREE, NULL_TREE); add_stmt (r); /* Create a binding level for the eh_info and the exception object @@ -1200,12 +1200,13 @@ finish_compound_stmt (tree stmt) } /* Finish an asm-statement, whose components are a STRING, some - OUTPUT_OPERANDS, some INPUT_OPERANDS, and some CLOBBERS. Also note - whether the asm-statement should be considered volatile. */ + OUTPUT_OPERANDS, some INPUT_OPERANDS, some CLOBBERS and some + LABELS. Also note whether the asm-statement should be + considered volatile. */ tree finish_asm_stmt (int volatile_p, tree string, tree output_operands, - tree input_operands, tree clobbers) + tree input_operands, tree clobbers, tree labels) { tree r; tree t; @@ -1223,7 +1224,7 @@ finish_asm_stmt (int volatile_p, tree string, tree output_operands, oconstraints = (const char **) alloca (noutputs * sizeof (char *)); string = resolve_asm_operand_names (string, output_operands, - input_operands); + input_operands, labels); for (i = 0, t = output_operands; t; t = TREE_CHAIN (t), ++i) { @@ -1251,7 +1252,7 @@ finish_asm_stmt (int volatile_p, tree string, tree output_operands, effectively const. */ || (CLASS_TYPE_P (TREE_TYPE (operand)) && C_TYPE_FIELDS_READONLY (TREE_TYPE (operand))))) - readonly_error (operand, "assignment (via 'asm' output)"); + readonly_error (operand, REK_ASSIGNMENT_ASM); constraint = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t))); oconstraints[i] = constraint; @@ -1307,25 +1308,27 @@ finish_asm_stmt (int volatile_p, tree string, tree output_operands, } } - r = build_stmt (ASM_EXPR, string, + r = build_stmt (input_location, ASM_EXPR, string, output_operands, input_operands, - clobbers); + clobbers, labels); ASM_VOLATILE_P (r) = volatile_p || noutputs == 0; r = maybe_cleanup_point_expr_void (r); return add_stmt (r); } -/* Finish a label with the indicated NAME. */ +/* Finish a label with the indicated NAME. Returns the new label. 
*/ tree finish_label_stmt (tree name) { tree decl = define_label (input_location, name); - if (decl == error_mark_node) + if (decl == error_mark_node) return error_mark_node; - return add_stmt (build_stmt (LABEL_EXPR, decl)); + add_stmt (build_stmt (input_location, LABEL_EXPR, decl)); + + return decl; } /* Finish a series of declarations for local labels. G++ allows users @@ -1421,7 +1424,7 @@ finish_non_static_data_member (tree decl, tree object, tree qualifying_scope) { gcc_assert (TREE_CODE (decl) == FIELD_DECL); - if (!object && skip_evaluation) + if (!object && cp_unevaluated_operand != 0) { /* DR 613: Can use non-static data members without an associated object in sizeof/decltype/alignof. */ @@ -1442,6 +1445,20 @@ finish_non_static_data_member (tree decl, tree object, tree qualifying_scope) return error_mark_node; } + + /* If decl is a non-capture field and object has a lambda type, + then we have a reference to a member of 'this' from a + lambda inside a non-static member function, and we must get to decl + through the 'this' capture. If decl is not a member of that object, + either, then its access will still fail later. */ + if (LAMBDA_TYPE_P (TREE_TYPE (object)) + && !LAMBDA_TYPE_P (DECL_CONTEXT (decl))) + object = cp_build_indirect_ref (lambda_expr_this_capture + (CLASSTYPE_LAMBDA_EXPR + (TREE_TYPE (object))), + RO_NULL, + /*complain=*/tf_warning_or_error); + if (current_class_ptr) TREE_USED (current_class_ptr) = 1; if (processing_template_decl && !qualifying_scope) @@ -1466,6 +1483,14 @@ finish_non_static_data_member (tree decl, tree object, tree qualifying_scope) return build_min (COMPONENT_REF, type, object, decl, NULL_TREE); } + /* If PROCESSING_TEMPLATE_DECL is nonzero here, then + QUALIFYING_SCOPE is also non-null. Wrap this in a SCOPE_REF + for now. */ + else if (processing_template_decl) + return build_qualified_name (TREE_TYPE (decl), + qualifying_scope, + DECL_NAME (decl), + /*template_p=*/false); else { tree access_type = TREE_TYPE (object); @@ -1485,15 +1510,6 @@ finish_non_static_data_member (tree decl, tree object, tree qualifying_scope) } } - /* If PROCESSING_TEMPLATE_DECL is nonzero here, then - QUALIFYING_SCOPE is also non-null. Wrap this in a SCOPE_REF - for now. */ - if (processing_template_decl) - return build_qualified_name (TREE_TYPE (decl), - qualifying_scope, - DECL_NAME (decl), - /*template_p=*/false); - perform_or_defer_access_check (TYPE_BINFO (access_type), decl, decl); @@ -1513,6 +1529,37 @@ finish_non_static_data_member (tree decl, tree object, tree qualifying_scope) } } +/* If we are currently parsing a template and we encountered a typedef + TYPEDEF_DECL that is being accessed though CONTEXT, this function + adds the typedef to a list tied to the current template. + At tempate instantiatin time, that list is walked and access check + performed for each typedef. + LOCATION is the location of the usage point of TYPEDEF_DECL. */ + +void +add_typedef_to_current_template_for_access_check (tree typedef_decl, + tree context, + location_t location) +{ + tree template_info = NULL; + tree cs = current_scope (); + + if (!is_typedef_decl (typedef_decl) + || !context + || !CLASS_TYPE_P (context) + || !cs) + return; + + if (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL) + template_info = get_template_info (cs); + + if (template_info + && TI_TEMPLATE (template_info) + && !currently_open_class (context)) + append_type_to_template_for_access_check (cs, typedef_decl, + context, location); +} + /* DECL was the declaration to which a qualified-id resolved. 
Issue an error message if it is not accessible. If OBJECT_TYPE is non-NULL, we have just seen `x->' or `x.' and OBJECT_TYPE is the @@ -1531,27 +1578,11 @@ check_accessibility_of_qualified_id (tree decl, add it to a list tied to the template. At template instantiation time, that list will be walked and access check performed. */ - if (is_typedef_decl (decl)) - { - /* This the scope through which type_decl is accessed. - It will be useful information later to do access check for - type_decl usage. */ - tree scope = nested_name_specifier - ? nested_name_specifier - : DECL_CONTEXT (decl); - tree templ_info = NULL; - tree cs = current_scope (); - - if (cs && (CLASS_TYPE_P (cs) || TREE_CODE (cs) == FUNCTION_DECL)) - templ_info = get_template_info (cs); - - if (templ_info - && TI_TEMPLATE (templ_info) - && scope - && CLASS_TYPE_P (scope) - && !currently_open_class (scope)) - append_type_to_template_for_access_check (current_scope (), decl, scope); - } + add_typedef_to_current_template_for_access_check (decl, + nested_name_specifier + ? nested_name_specifier + : DECL_CONTEXT (decl), + input_location); /* If we're not checking, return immediately. */ if (deferred_access_no_check) @@ -1659,6 +1690,7 @@ finish_qualified_id_expr (tree qualifying_class, else if (BASELINK_P (expr) && !processing_template_decl) { tree fns; + tree ob; /* See if any of the functions are non-static members. */ fns = BASELINK_FUNCTIONS (expr); @@ -1666,10 +1698,10 @@ finish_qualified_id_expr (tree qualifying_class, fns = TREE_OPERAND (fns, 0); /* If so, the expression may be relative to 'this'. */ if (!shared_member_p (fns) - && current_class_ref - && DERIVED_FROM_P (qualifying_class, TREE_TYPE (current_class_ref))) + && (ob = maybe_dummy_object (qualifying_class, NULL), + !is_dummy_object (ob))) expr = (build_class_member_access_expr - (maybe_dummy_object (qualifying_class, NULL), + (ob, expr, BASELINK_ACCESS_BINFO (expr), /*preserve_reference=*/false, @@ -1716,7 +1748,7 @@ finish_stmt_expr_expr (tree expr, tree stmt_expr) if (processing_template_decl) { - expr = build_stmt (EXPR_STMT, expr); + expr = build_stmt (input_location, EXPR_STMT, expr); expr = add_stmt (expr); /* Mark the last statement so that we can recognize it as such at template-instantiation time. */ @@ -1814,6 +1846,35 @@ stmt_expr_value_expr (tree stmt_expr) return t; } +/* Return TRUE iff EXPR_STMT is an empty list of + expression statements. */ + +bool +empty_expr_stmt_p (tree expr_stmt) +{ + tree body = NULL_TREE; + + if (expr_stmt == void_zero_node) + return true; + + if (expr_stmt) + { + if (TREE_CODE (expr_stmt) == EXPR_STMT) + body = EXPR_STMT_EXPR (expr_stmt); + else if (TREE_CODE (expr_stmt) == STATEMENT_LIST) + body = expr_stmt; + } + + if (body) + { + if (TREE_CODE (body) == STATEMENT_LIST) + return tsi_end_p (tsi_start (body)); + else + return empty_expr_stmt_p (body); + } + return false; +} + /* Perform Koenig lookup. FN is the postfix-expression representing the function (or functions) to call; ARGS are the arguments to the call. Returns the functions to be considered by overload @@ -1825,9 +1886,12 @@ perform_koenig_lookup (tree fn, VEC(tree,gc) *args) tree identifier = NULL_TREE; tree functions = NULL_TREE; tree tmpl_args = NULL_TREE; + bool template_id = false; if (TREE_CODE (fn) == TEMPLATE_ID_EXPR) { + /* Use a separate flag to handle null args. 
*/ + template_id = true; tmpl_args = TREE_OPERAND (fn, 1); fn = TREE_OPERAND (fn, 0); } @@ -1859,8 +1923,8 @@ perform_koenig_lookup (tree fn, VEC(tree,gc) *args) fn = unqualified_fn_lookup_error (identifier); } - if (fn && tmpl_args) - fn = build_nt (TEMPLATE_ID_EXPR, fn, tmpl_args); + if (fn && template_id) + fn = build2 (TEMPLATE_ID_EXPR, unknown_type_node, fn, tmpl_args); return fn; } @@ -1939,31 +2003,18 @@ finish_call_expr (tree fn, VEC(tree,gc) **args, bool disallow_virtual, . operator.... [Otherwise] a contrived object of type T becomes the implied object argument. - This paragraph is unclear about this situation: + In this situation: struct A { void f(); }; struct B : public A {}; struct C : public A { void g() { B::f(); }}; - In particular, for `B::f', this paragraph does not make clear - whether "the class of that member function" refers to `A' or - to `B'. We believe it refers to `B'. */ - if (current_class_type - && DERIVED_FROM_P (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)), - current_class_type) - && current_class_ref) - object = maybe_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)), - NULL); - else - { - tree representative_fn; + "the class of that member function" refers to `A'. But 11.2 + [class.access.base] says that we need to convert 'this' to B* as + part of the access, so we pass 'B' to maybe_dummy_object. */ - representative_fn = BASELINK_FUNCTIONS (fn); - if (TREE_CODE (representative_fn) == TEMPLATE_ID_EXPR) - representative_fn = TREE_OPERAND (representative_fn, 0); - representative_fn = get_first_fn (representative_fn); - object = build_dummy_object (DECL_CONTEXT (representative_fn)); - } + object = maybe_dummy_object (BINFO_TYPE (BASELINK_ACCESS_BINFO (fn)), + NULL); if (processing_template_decl) { @@ -1988,7 +2039,7 @@ finish_call_expr (tree fn, VEC(tree,gc) **args, bool disallow_virtual, if (TREE_CODE (fn) == FUNCTION_DECL && (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL || DECL_BUILT_IN_CLASS (fn) == BUILT_IN_MD)) - result = resolve_overloaded_builtin (fn, *args); + result = resolve_overloaded_builtin (input_location, fn, *args); if (!result) /* A call to a namespace-scope function. */ @@ -2043,7 +2094,14 @@ finish_this_expr (void) if (current_class_ptr) { - result = current_class_ptr; + tree type = TREE_TYPE (current_class_ref); + + /* In a lambda expression, 'this' refers to the captured 'this'. 
*/ + if (LAMBDA_TYPE_P (type)) + result = lambda_expr_this_capture (CLASSTYPE_LAMBDA_EXPR (type)); + else + result = current_class_ptr; + } else if (current_function_decl && DECL_STATIC_FUNCTION_P (current_function_decl)) @@ -2135,7 +2193,7 @@ finish_unary_op_expr (enum tree_code code, tree expr) TREE_NEGATED_INT (result) = 1; } if (TREE_OVERFLOW_P (result) && !TREE_OVERFLOW_P (expr)) - overflow_warning (result); + overflow_warning (input_location, result); return result; } @@ -2189,6 +2247,7 @@ finish_compound_literal (tree type, tree compound_literal) tree decl = create_temporary_var (type); DECL_INITIAL (decl) = compound_literal; TREE_STATIC (decl) = 1; + cp_apply_type_quals_to_decl (cp_type_quals (type), decl); decl = pushdecl_top_level (decl); DECL_NAME (decl) = make_anon_name (); SET_DECL_ASSEMBLER_NAME (decl, DECL_NAME (decl)); @@ -2248,7 +2307,8 @@ finish_template_type_parm (tree aggr, tree identifier) tree finish_template_template_parm (tree aggr, tree identifier) { - tree decl = build_decl (TYPE_DECL, identifier, NULL_TREE); + tree decl = build_decl (input_location, + TYPE_DECL, identifier, NULL_TREE); tree tmpl = build_lang_decl (TEMPLATE_DECL, identifier, NULL_TREE); DECL_TEMPLATE_PARMS (tmpl) = current_template_parms; DECL_TEMPLATE_RESULT (tmpl) = decl; @@ -2299,6 +2359,26 @@ begin_class_definition (tree t, tree attributes) error ("definition of %q#T inside template parameter list", t); return error_mark_node; } + + /* According to the C++ ABI, decimal classes defined in ISO/IEC TR 24733 + are passed the same as decimal scalar types. */ + if (TREE_CODE (t) == RECORD_TYPE + && !processing_template_decl) + { + tree ns = TYPE_CONTEXT (t); + if (ns && TREE_CODE (ns) == NAMESPACE_DECL + && DECL_CONTEXT (ns) == std_node + && DECL_NAME (ns) + && !strcmp (IDENTIFIER_POINTER (DECL_NAME (ns)), "decimal")) + { + const char *n = TYPE_NAME_STRING (t); + if ((strcmp (n, "decimal32") == 0) + || (strcmp (n, "decimal64") == 0) + || (strcmp (n, "decimal128") == 0)) + TYPE_TRANSPARENT_AGGR (t) = 1; + } + } + /* A non-implicit typename comes from code like: template struct A { @@ -2317,9 +2397,6 @@ begin_class_definition (tree t, tree attributes) pushtag (make_anon_name (), t, /*tag_scope=*/ts_current); } - /* Update the location of the decl. 
*/ - DECL_SOURCE_LOCATION (TYPE_NAME (t)) = input_location; - if (TYPE_BEING_DEFINED (t)) { t = make_class_type (TREE_CODE (t)); @@ -2560,20 +2637,21 @@ qualified_name_lookup_error (tree scope, tree name, else if (TYPE_P (scope)) { if (!COMPLETE_TYPE_P (scope)) - error ("%Hincomplete type %qT used in nested name specifier", - &location, scope); + error_at (location, "incomplete type %qT used in nested name specifier", + scope); else if (TREE_CODE (decl) == TREE_LIST) { - error ("%Hreference to %<%T::%D%> is ambiguous", &location, scope, name); + error_at (location, "reference to %<%T::%D%> is ambiguous", + scope, name); print_candidates (decl); } else - error ("%H%qD is not a member of %qT", &location, name, scope); + error_at (location, "%qD is not a member of %qT", name, scope); } else if (scope != global_namespace) - error ("%H%qD is not a member of %qD", &location, name, scope); + error_at (location, "%qD is not a member of %qD", name, scope); else - error ("%H%<::%D%> has not been declared", &location, name); + error_at (location, "%<::%D%> has not been declared", name); } /* If FNS is a member function, a set of member functions, or a @@ -2602,7 +2680,32 @@ baselink_for_fns (tree fns) if (!cl) cl = DECL_CONTEXT (fn); cl = TYPE_BINFO (cl); - return build_baselink (cl, cl, fns, /*optype=*/NULL_TREE); + return build_baselink (TYPE_BINFO (DECL_CONTEXT (fn)), cl, fns, + /*optype=*/NULL_TREE); +} + +/* Returns true iff DECL is an automatic variable from a function outside + the current one. */ + +static bool +outer_automatic_var_p (tree decl) +{ + return ((TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL) + && DECL_FUNCTION_SCOPE_P (decl) + && !TREE_STATIC (decl) + && DECL_CONTEXT (decl) != current_function_decl); +} + +/* Returns true iff DECL is a capture field from a lambda that is not our + immediate context. */ + +static bool +outer_lambda_capture_p (tree decl) +{ + return (TREE_CODE (decl) == FIELD_DECL + && LAMBDA_TYPE_P (DECL_CONTEXT (decl)) + && (!current_class_type + || !DERIVED_FROM_P (DECL_CONTEXT (decl), current_class_type))); } /* ID_EXPRESSION is a representation of parsed, but unprocessed, @@ -2706,12 +2809,70 @@ finish_id_expression (tree id_expression, if (!scope && decl != error_mark_node) maybe_note_name_used_in_class (id_expression, decl); - /* Disallow uses of local variables from containing functions. */ - if (TREE_CODE (decl) == VAR_DECL || TREE_CODE (decl) == PARM_DECL) + /* Disallow uses of local variables from containing functions, except + within lambda-expressions. */ + if ((outer_automatic_var_p (decl) + || outer_lambda_capture_p (decl)) + /* It's not a use (3.2) if we're in an unevaluated context. */ + && !cp_unevaluated_operand) { - tree context = decl_function_context (decl); - if (context != NULL_TREE && context != current_function_decl - && ! TREE_STATIC (decl)) + tree context = DECL_CONTEXT (decl); + tree containing_function = current_function_decl; + tree lambda_stack = NULL_TREE; + tree lambda_expr = NULL_TREE; + tree initializer = decl; + + /* Core issue 696: "[At the July 2009 meeting] the CWG expressed + support for an approach in which a reference to a local + [constant] automatic variable in a nested class or lambda body + would enter the expression as an rvalue, which would reduce + the complexity of the problem" + + FIXME update for final resolution of core issue 696. */ + if (DECL_INTEGRAL_CONSTANT_VAR_P (decl)) + return integral_constant_value (decl); + + if (TYPE_P (context)) + { + /* Implicit capture of an explicit capture. 
*/ + context = lambda_function (context); + initializer = thisify_lambda_field (decl); + } + + /* If we are in a lambda function, we can move out until we hit + 1. the context, + 2. a non-lambda function, or + 3. a non-default capturing lambda function. */ + while (context != containing_function + && LAMBDA_FUNCTION_P (containing_function)) + { + lambda_expr = CLASSTYPE_LAMBDA_EXPR + (DECL_CONTEXT (containing_function)); + + if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda_expr) + == CPLD_NONE) + break; + + lambda_stack = tree_cons (NULL_TREE, + lambda_expr, + lambda_stack); + + containing_function + = decl_function_context (containing_function); + } + + if (context == containing_function) + { + decl = add_default_capture (lambda_stack, + /*id=*/DECL_NAME (decl), + initializer); + } + else if (lambda_expr) + { + error ("%qD is not captured", decl); + return error_mark_node; + } + else { error (TREE_CODE (decl) == VAR_DECL ? "use of % variable from containing function" @@ -3122,10 +3283,11 @@ simplify_aggr_init_expr (tree *tp) style = arg; } - call_expr = build_call_array (TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))), - fn, - aggr_init_expr_nargs (aggr_init_expr), - AGGR_INIT_EXPR_ARGP (aggr_init_expr)); + call_expr = build_call_array_loc (input_location, + TREE_TYPE (TREE_TYPE (TREE_TYPE (fn))), + fn, + aggr_init_expr_nargs (aggr_init_expr), + AGGR_INIT_EXPR_ARGP (aggr_init_expr)); if (style == ctor) { @@ -3178,7 +3340,9 @@ emit_associated_thunks (tree fn) is so that you can know statically the entire set of thunks that will ever be needed for a given virtual function, thereby enabling you to output all the thunks with the function itself. */ - if (DECL_VIRTUAL_P (fn)) + if (DECL_VIRTUAL_P (fn) + /* Do not emit thunks for extern template instantiations. */ + && ! DECL_REALLY_EXTERN (fn)) { tree thunk; @@ -3204,8 +3368,8 @@ emit_associated_thunks (tree fn) /* Generate RTL for FN. */ -void -expand_or_defer_fn (tree fn) +bool +expand_or_defer_fn_1 (tree fn) { /* When the parser calls us after finishing the body of a template function, we don't really want to expand the body. */ @@ -3219,10 +3383,10 @@ expand_or_defer_fn (tree fn) is not a GC root. */ if (!function_depth) ggc_collect (); - return; + return false; } - gcc_assert (gimple_body (fn)); + gcc_assert (DECL_SAVED_TREE (fn)); /* If this is a constructor or destructor body, we have to clone it. */ @@ -3231,7 +3395,8 @@ expand_or_defer_fn (tree fn) /* We don't want to process FN again, so pretend we've written it out, even though we haven't. */ TREE_ASM_WRITTEN (fn) = 1; - return; + DECL_SAVED_TREE (fn) = NULL_TREE; + return false; } /* We make a decision about linkage for these functions at the end @@ -3270,7 +3435,9 @@ expand_or_defer_fn (tree fn) this function as needed so that finish_file will make sure to output it later. Similarly, all dllexport'd functions must be emitted; there may be callers in other DLLs. */ - if ((flag_keep_inline_functions && DECL_DECLARED_INLINE_P (fn)) + if ((flag_keep_inline_functions + && DECL_DECLARED_INLINE_P (fn) + && !DECL_REALLY_EXTERN (fn)) || lookup_attribute ("dllexport", DECL_ATTRIBUTES (fn))) mark_needed (fn); } @@ -3278,14 +3445,24 @@ expand_or_defer_fn (tree fn) /* There's no reason to do any of the work here if we're only doing semantic analysis; this code just generates RTL. */ if (flag_syntax_only) - return; + return false; - function_depth++; + return true; +} - /* Expand or defer, at the whim of the compilation unit manager. 
*/ - cgraph_finalize_function (fn, function_depth > 1); +void +expand_or_defer_fn (tree fn) +{ + if (expand_or_defer_fn_1 (fn)) + { + function_depth++; - function_depth--; + /* Expand or defer, at the whim of the compilation unit manager. */ + cgraph_finalize_function (fn, function_depth > 1); + emit_associated_thunks (fn); + + function_depth--; + } } struct nrv_data @@ -3328,9 +3505,9 @@ finalize_nrv_r (tree* tp, int* walk_subtrees, void* data) init = build2 (INIT_EXPR, void_type_node, dp->result, DECL_INITIAL (dp->var)); else - init = build_empty_stmt (); + init = build_empty_stmt (EXPR_LOCATION (*tp)); DECL_INITIAL (dp->var) = NULL_TREE; - SET_EXPR_LOCUS (init, EXPR_LOCUS (*tp)); + SET_EXPR_LOCATION (init, EXPR_LOCATION (*tp)); *tp = init; } /* And replace all uses of the NRV with the RESULT_DECL. */ @@ -3858,7 +4035,7 @@ finish_omp_threadprivate (tree vars) /* Make sure that DECL_DISCRIMINATOR_P continues to be true after the allocation of the lang_decl structure. */ if (DECL_DISCRIMINATOR_P (v)) - DECL_LANG_SPECIFIC (v)->decl_flags.u2sel = 1; + DECL_LANG_SPECIFIC (v)->u.base.u2sel = 1; } if (! DECL_THREAD_LOCAL_P (v)) @@ -3981,7 +4158,7 @@ handle_omp_for_class_iterator (int i, location_t locus, tree declv, tree initv, } if (cond == error_mark_node) { - error ("%Hinvalid controlling predicate", &elocus); + error_at (elocus, "invalid controlling predicate"); return true; } diff = build_x_binary_op (MINUS_EXPR, TREE_OPERAND (cond, 1), @@ -3991,8 +4168,8 @@ handle_omp_for_class_iterator (int i, location_t locus, tree declv, tree initv, return true; if (TREE_CODE (TREE_TYPE (diff)) != INTEGER_TYPE) { - error ("%Hdifference between %qE and %qD does not have integer type", - &elocus, TREE_OPERAND (cond, 1), iter); + error_at (elocus, "difference between %qE and %qD does not have integer type", + TREE_OPERAND (cond, 1), iter); return true; } @@ -4086,7 +4263,7 @@ handle_omp_for_class_iterator (int i, location_t locus, tree declv, tree initv, if (incr == error_mark_node) { - error ("%Hinvalid increment expression", &elocus); + error_at (elocus, "invalid increment expression"); return true; } @@ -4134,7 +4311,7 @@ handle_omp_for_class_iterator (int i, location_t locus, tree declv, tree initv, TREE_CODE (cond), decl, diff, tf_warning_or_error); incr = build_modify_expr (elocus, decl, NULL_TREE, PLUS_EXPR, - incr, NULL_TREE); + elocus, incr, NULL_TREE); orig_body = *body; *body = push_stmt_list (); @@ -4213,8 +4390,8 @@ finish_omp_for (location_t locus, tree declv, tree initv, tree condv, if (decl == NULL) { - error ("%Hexpected iteration declaration or initialization", - &locus); + error_at (locus, + "expected iteration declaration or initialization"); return NULL; } } @@ -4224,13 +4401,13 @@ finish_omp_for (location_t locus, tree declv, tree initv, tree condv, if (cond == NULL) { - error ("%Hmissing controlling predicate", &elocus); + error_at (elocus, "missing controlling predicate"); return NULL; } if (incr == NULL) { - error ("%Hmissing increment expression", &elocus); + error_at (elocus, "missing increment expression"); return NULL; } @@ -4284,8 +4461,7 @@ finish_omp_for (location_t locus, tree declv, tree initv, tree condv, if (!DECL_P (decl)) { - error ("%Hexpected iteration declaration or initialization", - &elocus); + error_at (elocus, "expected iteration declaration or initialization"); return NULL; } @@ -4310,7 +4486,7 @@ finish_omp_for (location_t locus, tree declv, tree initv, tree condv, if (!INTEGRAL_TYPE_P (TREE_TYPE (decl)) && TREE_CODE (TREE_TYPE (decl)) != POINTER_TYPE) { - 
error ("%Hinvalid type for iteration variable %qE", &elocus, decl); + error_at (elocus, "invalid type for iteration variable %qE", decl); return NULL; } @@ -4425,7 +4601,7 @@ finish_omp_atomic (enum tree_code code, tree lhs, tree rhs) } if (!dependent_p) { - stmt = c_finish_omp_atomic (code, lhs, rhs); + stmt = c_finish_omp_atomic (input_location, code, lhs, rhs); if (stmt == error_mark_node) return; } @@ -4525,27 +4701,21 @@ finish_static_assert (tree condition, tree message, location_t location, } } -/* Returns decltype((EXPR)) for cases where we can drop the decltype and - just return the type even though EXPR is a type-dependent expression. - The ABI specifies which cases this applies to, which is a subset of the - possible cases. */ +/* Returns the type of EXPR for cases where we can determine it even though + EXPR is a type-dependent expression. */ tree describable_type (tree expr) { tree type = NULL_TREE; - /* processing_template_decl isn't set when we're called from the mangling - code, so bump it now. */ - ++processing_template_decl; if (! type_dependent_expression_p (expr) && ! type_unknown_p (expr)) { - type = TREE_TYPE (expr); + type = unlowered_expr_type (expr); if (real_lvalue_p (expr)) type = build_reference_type (type); } - --processing_template_decl; if (type) return type; @@ -4556,8 +4726,7 @@ describable_type (tree expr) case PARM_DECL: case RESULT_DECL: case FUNCTION_DECL: - /* Named rvalue reference becomes lvalue. */ - type = build_reference_type (non_reference (TREE_TYPE (expr))); + return TREE_TYPE (expr); break; case NEW_EXPR: @@ -4616,7 +4785,13 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p) return error_mark_node; } - if (type_dependent_expression_p (expr)) + if (type_dependent_expression_p (expr) + /* In a template, a COMPONENT_REF has an IDENTIFIER_NODE for op1 even + if it isn't dependent, so that we can check access control at + instantiation time, so defer the decltype as well (PR 42277). */ + || (id_expression_or_member_access_p + && processing_template_decl + && TREE_CODE (expr) == COMPONENT_REF)) { if (id_expression_or_member_access_p) { @@ -4639,6 +4814,7 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p) if (type && !type_uses_auto (type)) return type; + treat_as_dependent: type = cxx_make_type (DECLTYPE_TYPE); DECLTYPE_TYPE_EXPR (type) = expr; DECLTYPE_TYPE_ID_EXPR_OR_MEMBER_ACCESS_P (type) @@ -4650,6 +4826,15 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p) /* The type denoted by decltype(e) is defined as follows: */ + expr = resolve_nondeduced_context (expr); + + /* To get the size of a static data member declared as an array of + unknown bound, we need to instantiate it. */ + if (TREE_CODE (expr) == VAR_DECL + && VAR_HAD_UNKNOWN_BOUND (expr) + && DECL_TEMPLATE_INSTANTIATION (expr)) + instantiate_decl (expr, /*defer_ok*/true, /*expl_inst_mem*/false); + if (id_expression_or_member_access_p) { /* If e is an id-expression or a class member access (5.2.5 @@ -4675,9 +4860,13 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p) /* See through BASELINK nodes to the underlying functions. 
*/ expr = BASELINK_FUNCTIONS (expr); + if (TREE_CODE (expr) == TEMPLATE_ID_EXPR) + expr = TREE_OPERAND (expr, 0); + if (TREE_CODE (expr) == OVERLOAD) { - if (OVL_CHAIN (expr)) + if (OVL_CHAIN (expr) + || TREE_CODE (OVL_FUNCTION (expr)) == TEMPLATE_DECL) { error ("%qE refers to a set of overloaded functions", orig_expr); return error_mark_node; @@ -4761,6 +4950,11 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p) && (TREE_CODE (TREE_TYPE (target_type)) == FUNCTION_TYPE || TREE_CODE (TREE_TYPE (target_type)) == METHOD_TYPE)) type = TREE_TYPE (TREE_TYPE (target_type)); + else if (processing_template_decl) + /* Within a template finish_call_expr doesn't resolve + CALL_EXPR_FN, so even though this decltype isn't really + dependent let's defer resolving it. */ + goto treat_as_dependent; else sorry ("unable to determine the declared type of expression %<%E%>", expr); @@ -4781,6 +4975,18 @@ finish_decltype_type (tree expr, bool id_expression_or_member_access_p) if (real_lvalue_p (expr)) type = build_reference_type (type); } + /* Within a lambda-expression: + + Every occurrence of decltype((x)) where x is a possibly + parenthesized id-expression that names an entity of + automatic storage duration is treated as if x were + transformed into an access to a corresponding data member + of the closure type that would have been declared if x + were a use of the denoted entity. */ + else if (outer_automatic_var_p (expr) + && current_function_decl + && LAMBDA_FUNCTION_P (current_function_decl)) + type = capture_decltype (expr); else { /* Otherwise, where T is the type of e, if e is an lvalue, @@ -4835,6 +5041,8 @@ classtype_has_nothrow_assign_or_copy_p (tree type, bool assign_p) it now. */ if (CLASSTYPE_LAZY_COPY_CTOR (type)) lazily_declare_fn (sfk_copy_constructor, type); + if (CLASSTYPE_LAZY_MOVE_CTOR (type)) + lazily_declare_fn (sfk_move_constructor, type); fns = CLASSTYPE_CONSTRUCTORS (type); } else @@ -4872,6 +5080,7 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2) switch (kind) { case CPTK_HAS_NOTHROW_ASSIGN: + type1 = strip_array_types (type1); return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE && (trait_expr_value (CPTK_HAS_TRIVIAL_ASSIGN, type1, type2) || (CLASS_TYPE_P (type1) @@ -4879,8 +5088,11 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2) true)))); case CPTK_HAS_TRIVIAL_ASSIGN: + /* ??? The standard seems to be missing the "or array of such a class + type" wording for this trait. */ + type1 = strip_array_types (type1); return (!CP_TYPE_CONST_P (type1) && type_code1 != REFERENCE_TYPE - && (pod_type_p (type1) + && (trivial_type_p (type1) || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_ASSIGN_REF (type1)))); @@ -4893,21 +5105,25 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2) case CPTK_HAS_TRIVIAL_CONSTRUCTOR: type1 = strip_array_types (type1); - return (pod_type_p (type1) + return (trivial_type_p (type1) || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_DFLT (type1))); case CPTK_HAS_NOTHROW_COPY: + type1 = strip_array_types (type1); return (trait_expr_value (CPTK_HAS_TRIVIAL_COPY, type1, type2) || (CLASS_TYPE_P (type1) && classtype_has_nothrow_assign_or_copy_p (type1, false))); case CPTK_HAS_TRIVIAL_COPY: - return (pod_type_p (type1) || type_code1 == REFERENCE_TYPE + /* ??? The standard seems to be missing the "or array of such a class + type" wording for this trait. 
*/ + type1 = strip_array_types (type1); + return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_INIT_REF (type1))); case CPTK_HAS_TRIVIAL_DESTRUCTOR: type1 = strip_array_types (type1); - return (pod_type_p (type1) || type_code1 == REFERENCE_TYPE + return (trivial_type_p (type1) || type_code1 == REFERENCE_TYPE || (CLASS_TYPE_P (type1) && TYPE_HAS_TRIVIAL_DESTRUCTOR (type1))); @@ -4941,6 +5157,12 @@ trait_expr_value (cp_trait_kind kind, tree type1, tree type2) case CPTK_IS_POLYMORPHIC: return (CLASS_TYPE_P (type1) && TYPE_POLYMORPHIC_P (type1)); + case CPTK_IS_STD_LAYOUT: + return (std_layout_type_p (type1)); + + case CPTK_IS_TRIVIAL: + return (trivial_type_p (type1)); + case CPTK_IS_UNION: return (type_code1 == UNION_TYPE); @@ -4989,6 +5211,8 @@ finish_trait_expr (cp_trait_kind kind, tree type1, tree type2) || kind == CPTK_IS_ENUM || kind == CPTK_IS_POD || kind == CPTK_IS_POLYMORPHIC + || kind == CPTK_IS_STD_LAYOUT + || kind == CPTK_IS_TRIVIAL || kind == CPTK_IS_UNION); if (kind == CPTK_IS_CONVERTIBLE_TO) @@ -5030,6 +5254,8 @@ finish_trait_expr (cp_trait_kind kind, tree type1, tree type2) case CPTK_IS_EMPTY: case CPTK_IS_POD: case CPTK_IS_POLYMORPHIC: + case CPTK_IS_STD_LAYOUT: + case CPTK_IS_TRIVIAL: if (!check_trait_type (type1)) { error ("incomplete type %qT not allowed", type1); @@ -5080,4 +5306,716 @@ float_const_decimal64_p (void) return 0; } +/* Return true if T is a literal type. */ + +bool +literal_type_p (tree t) +{ + if (SCALAR_TYPE_P (t)) + return true; + if (CLASS_TYPE_P (t)) + return CLASSTYPE_LITERAL_P (t); + if (TREE_CODE (t) == ARRAY_TYPE) + return literal_type_p (strip_array_types (t)); + return false; +} + + +/* If DECL is a variable declared `constexpr', require its type + be literal. Return the DECL if OK, otherwise NULL. */ + +tree +ensure_literal_type_for_constexpr_object (tree decl) +{ + tree type = TREE_TYPE (decl); + if (TREE_CODE (decl) == VAR_DECL && DECL_DECLARED_CONSTEXPR_P (decl) + && !processing_template_decl && !literal_type_p (type)) + { + error ("the type %qT of constexpr variable %qD is not literal", + type, decl); + return NULL; + } + return decl; +} + +/* Return non-null if FUN certainly designates a valid constexpr function + declaration. Otherwise return NULL. Issue appropriate diagnostics + if necessary. Note that we only check the declaration, not the body + of the function. */ + +tree +validate_constexpr_fundecl (tree fun) +{ + tree rettype = NULL; + tree parm = NULL; + + /* Don't bother if FUN is not marked constexpr. */ + if (!DECL_DECLARED_CONSTEXPR_P (fun)) + return NULL; + + /* For a function template, we have absolutely no guarantee that all + instantiations will be constexpr. */ + if (TREE_CODE (fun) == TEMPLATE_DECL) + return NULL; + + parm = FUNCTION_FIRST_USER_PARM (fun); + for (; parm != NULL; parm = TREE_CHAIN (parm)) + { + tree type = TREE_TYPE (parm); + if (dependent_type_p (type)) + return NULL; + if (!literal_type_p (type)) + { + error ("parameter %q#D is not of literal type", parm); + return NULL; + } + } + + if (DECL_CONSTRUCTOR_P (fun)) + return fun; + + rettype = TREE_TYPE (TREE_TYPE (fun)); + if (dependent_type_p (rettype)) + return NULL; + if (!literal_type_p (rettype)) + { + error ("return type %qT of function %qD is not a literal type", + TREE_TYPE (TREE_TYPE (fun)), fun); + return NULL; + } + return fun; +} + + +/* Constructor for a lambda expression. 
*/ + +tree +build_lambda_expr (void) +{ + tree lambda = make_node (LAMBDA_EXPR); + LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) = CPLD_NONE; + LAMBDA_EXPR_CAPTURE_LIST (lambda) = NULL_TREE; + LAMBDA_EXPR_THIS_CAPTURE (lambda) = NULL_TREE; + LAMBDA_EXPR_RETURN_TYPE (lambda) = NULL_TREE; + LAMBDA_EXPR_MUTABLE_P (lambda) = false; + return lambda; +} + +/* Create the closure object for a LAMBDA_EXPR. */ + +tree +build_lambda_object (tree lambda_expr) +{ + /* Build aggregate constructor call. + - cp_parser_braced_list + - cp_parser_functional_cast */ + VEC(constructor_elt,gc) *elts = NULL; + tree node, expr, type; + location_t saved_loc; + + if (processing_template_decl) + return lambda_expr; + + /* Make sure any error messages refer to the lambda-introducer. */ + saved_loc = input_location; + input_location = LAMBDA_EXPR_LOCATION (lambda_expr); + + for (node = LAMBDA_EXPR_CAPTURE_LIST (lambda_expr); + node; + node = TREE_CHAIN (node)) + { + tree field = TREE_PURPOSE (node); + tree val = TREE_VALUE (node); + + if (DECL_P (val)) + mark_used (val); + + /* Mere mortals can't copy arrays with aggregate initialization, so + do some magic to make it work here. */ + if (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE) + val = build_array_copy (val); + else if (DECL_NORMAL_CAPTURE_P (field) + && TREE_CODE (TREE_TYPE (field)) != REFERENCE_TYPE) + { + /* "the entities that are captured by copy are used to + direct-initialize each corresponding non-static data + member of the resulting closure object." + + There's normally no way to express direct-initialization + from an element of a CONSTRUCTOR, so we build up a special + TARGET_EXPR to bypass the usual copy-initialization. */ + val = force_rvalue (val); + if (TREE_CODE (val) == TARGET_EXPR) + TARGET_EXPR_DIRECT_INIT_P (val) = true; + } + + CONSTRUCTOR_APPEND_ELT (elts, DECL_NAME (field), val); + } + + expr = build_constructor (init_list_type_node, elts); + CONSTRUCTOR_IS_DIRECT_INIT (expr) = 1; + + /* N2927: "[The closure] class type is not an aggregate." + But we briefly treat it as an aggregate to make this simpler. */ + type = TREE_TYPE (lambda_expr); + CLASSTYPE_NON_AGGREGATE (type) = 0; + expr = finish_compound_literal (type, expr); + CLASSTYPE_NON_AGGREGATE (type) = 1; + + input_location = saved_loc; + return expr; +} + +/* Return an initialized RECORD_TYPE for LAMBDA. + LAMBDA must have its explicit captures already. */ + +tree +begin_lambda_type (tree lambda) +{ + tree type; + + { + /* Unique name. This is just like an unnamed class, but we cannot use + make_anon_name because of certain checks against TYPE_ANONYMOUS_P. */ + tree name; + name = make_lambda_name (); + + /* Create the new RECORD_TYPE for this lambda. */ + type = xref_tag (/*tag_code=*/record_type, + name, + /*scope=*/ts_within_enclosing_non_class, + /*template_header_p=*/false); + } + + /* Designate it as a struct so that we can use aggregate initialization. */ + CLASSTYPE_DECLARED_CLASS (type) = false; + + /* Clear base types. */ + xref_basetypes (type, /*bases=*/NULL_TREE); + + /* Start the class. */ + type = begin_class_definition (type, /*attributes=*/NULL_TREE); + + /* Cross-reference the expression and the type. */ + TREE_TYPE (lambda) = type; + CLASSTYPE_LAMBDA_EXPR (type) = lambda; + + return type; +} + +/* Returns the type to use for the return type of the operator() of a + closure class. 
*/ + +tree +lambda_return_type (tree expr) +{ + tree type; + if (BRACE_ENCLOSED_INITIALIZER_P (expr)) + { + warning (0, "cannot deduce lambda return type from a braced-init-list"); + return void_type_node; + } + if (type_dependent_expression_p (expr)) + { + type = cxx_make_type (DECLTYPE_TYPE); + DECLTYPE_TYPE_EXPR (type) = expr; + DECLTYPE_FOR_LAMBDA_RETURN (type) = true; + SET_TYPE_STRUCTURAL_EQUALITY (type); + } + else + type = type_decays_to (unlowered_expr_type (expr)); + return type; +} + +/* Given a LAMBDA_EXPR or closure type LAMBDA, return the op() of the + closure type. */ + +tree +lambda_function (tree lambda) +{ + tree type; + if (TREE_CODE (lambda) == LAMBDA_EXPR) + type = TREE_TYPE (lambda); + else + type = lambda; + gcc_assert (LAMBDA_TYPE_P (type)); + /* Don't let debug_tree cause instantiation. */ + if (CLASSTYPE_TEMPLATE_INSTANTIATION (type) && !COMPLETE_TYPE_P (type)) + return NULL_TREE; + lambda = lookup_member (type, ansi_opname (CALL_EXPR), + /*protect=*/0, /*want_type=*/false); + if (lambda) + lambda = BASELINK_FUNCTIONS (lambda); + return lambda; +} + +/* Returns the type to use for the FIELD_DECL corresponding to the + capture of EXPR. + The caller should add REFERENCE_TYPE for capture by reference. */ + +tree +lambda_capture_field_type (tree expr) +{ + tree type; + if (type_dependent_expression_p (expr)) + { + type = cxx_make_type (DECLTYPE_TYPE); + DECLTYPE_TYPE_EXPR (type) = expr; + DECLTYPE_FOR_LAMBDA_CAPTURE (type) = true; + SET_TYPE_STRUCTURAL_EQUALITY (type); + } + else + type = non_reference (unlowered_expr_type (expr)); + return type; +} + +/* Recompute the return type for LAMBDA with body of the form: + { return EXPR ; } */ + +void +apply_lambda_return_type (tree lambda, tree return_type) +{ + tree fco = lambda_function (lambda); + tree result; + + LAMBDA_EXPR_RETURN_TYPE (lambda) = return_type; + + /* If we got a DECLTYPE_TYPE, don't stick it in the function yet, + it would interfere with instantiating the closure type. */ + if (dependent_type_p (return_type)) + return; + if (return_type == error_mark_node) + return; + + /* TREE_TYPE (FUNCTION_DECL) == METHOD_TYPE + TREE_TYPE (METHOD_TYPE) == return-type */ + TREE_TYPE (fco) = change_return_type (return_type, TREE_TYPE (fco)); + + result = DECL_RESULT (fco); + if (result == NULL_TREE) + return; + + /* We already have a DECL_RESULT from start_preparsed_function. + Now we need to redo the work it and allocate_struct_function + did to reflect the new type. */ + result = build_decl (input_location, RESULT_DECL, NULL_TREE, + TYPE_MAIN_VARIANT (return_type)); + DECL_ARTIFICIAL (result) = 1; + DECL_IGNORED_P (result) = 1; + cp_apply_type_quals_to_decl (cp_type_quals (return_type), + result); + + DECL_RESULT (fco) = result; + + if (!processing_template_decl && aggregate_value_p (result, fco)) + { +#ifdef PCC_STATIC_STRUCT_RETURN + cfun->returns_pcc_struct = 1; +#endif + cfun->returns_struct = 1; + } + +} + +/* DECL is a local variable or parameter from the surrounding scope of a + lambda-expression. Returns the decltype for a use of the capture field + for DECL even if it hasn't been captured yet. */ + +static tree +capture_decltype (tree decl) +{ + tree lam = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (current_function_decl)); + /* FIXME do lookup instead of list walk? 
*/ + tree cap = value_member (decl, LAMBDA_EXPR_CAPTURE_LIST (lam)); + tree type; + + if (cap) + type = TREE_TYPE (TREE_PURPOSE (cap)); + else + switch (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lam)) + { + case CPLD_NONE: + error ("%qD is not captured", decl); + return error_mark_node; + + case CPLD_COPY: + type = TREE_TYPE (decl); + if (TREE_CODE (type) == REFERENCE_TYPE + && TREE_CODE (TREE_TYPE (type)) != FUNCTION_TYPE) + type = TREE_TYPE (type); + break; + + case CPLD_REFERENCE: + type = TREE_TYPE (decl); + if (TREE_CODE (type) != REFERENCE_TYPE) + type = build_reference_type (TREE_TYPE (decl)); + break; + + default: + gcc_unreachable (); + } + + if (TREE_CODE (type) != REFERENCE_TYPE) + { + if (!LAMBDA_EXPR_MUTABLE_P (lam)) + type = cp_build_qualified_type (type, (TYPE_QUALS (type) + |TYPE_QUAL_CONST)); + type = build_reference_type (type); + } + return type; +} + +/* From an ID and INITIALIZER, create a capture (by reference if + BY_REFERENCE_P is true), add it to the capture-list for LAMBDA, + and return it. */ + +tree +add_capture (tree lambda, tree id, tree initializer, bool by_reference_p, + bool explicit_init_p) +{ + tree type; + tree member; + + type = lambda_capture_field_type (initializer); + if (by_reference_p) + { + type = build_reference_type (type); + if (!real_lvalue_p (initializer)) + error ("cannot capture %qE by reference", initializer); + } + + /* Make member variable. */ + member = build_lang_decl (FIELD_DECL, id, type); + if (!explicit_init_p) + /* Normal captures are invisible to name lookup but uses are replaced + with references to the capture field; we implement this by only + really making them invisible in unevaluated context; see + qualify_lookup. For now, let's make explicitly initialized captures + always visible. */ + DECL_NORMAL_CAPTURE_P (member) = true; + + /* Add it to the appropriate closure class if we've started it. */ + if (current_class_type && current_class_type == TREE_TYPE (lambda)) + finish_member_declaration (member); + + LAMBDA_EXPR_CAPTURE_LIST (lambda) + = tree_cons (member, initializer, LAMBDA_EXPR_CAPTURE_LIST (lambda)); + + if (id == get_identifier ("__this")) + { + if (LAMBDA_EXPR_CAPTURES_THIS_P (lambda)) + error ("already captured % in lambda expression"); + LAMBDA_EXPR_THIS_CAPTURE (lambda) = member; + } + + return member; +} + +/* Register all the capture members on the list CAPTURES, which is the + LAMBDA_EXPR_CAPTURE_LIST for the lambda after the introducer. */ + +void register_capture_members (tree captures) +{ + if (captures) + { + register_capture_members (TREE_CHAIN (captures)); + finish_member_declaration (TREE_PURPOSE (captures)); + } +} + +/* Given a FIELD_DECL decl belonging to a closure type, return a + COMPONENT_REF of it relative to the 'this' parameter of the op() for + that type. */ + +static tree +thisify_lambda_field (tree decl) +{ + tree context = lambda_function (DECL_CONTEXT (decl)); + tree object = cp_build_indirect_ref (DECL_ARGUMENTS (context), + RO_NULL, + tf_warning_or_error); + return finish_non_static_data_member (decl, object, + /*qualifying_scope*/NULL_TREE); +} + +/* Similar to add_capture, except this works on a stack of nested lambdas. + BY_REFERENCE_P in this case is derived from the default capture mode. + Returns the capture for the lambda at the bottom of the stack. 
*/ + +tree +add_default_capture (tree lambda_stack, tree id, tree initializer) +{ + bool this_capture_p = (id == get_identifier ("__this")); + + tree member = NULL_TREE; + + tree saved_class_type = current_class_type; + + tree node; + + for (node = lambda_stack; + node; + node = TREE_CHAIN (node)) + { + tree lambda = TREE_VALUE (node); + + current_class_type = TREE_TYPE (lambda); + member = add_capture (lambda, + id, + initializer, + /*by_reference_p=*/ + (!this_capture_p + && (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) + == CPLD_REFERENCE)), + /*explicit_init_p=*/false); + initializer = thisify_lambda_field (member); + } + + current_class_type = saved_class_type; + + return member; +} + +/* Return the capture pertaining to a use of 'this' in LAMBDA, in the form of an + INDIRECT_REF, possibly adding it through default capturing. */ + +tree +lambda_expr_this_capture (tree lambda) +{ + tree result; + + tree this_capture = LAMBDA_EXPR_THIS_CAPTURE (lambda); + + /* Try to default capture 'this' if we can. */ + if (!this_capture + && LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) != CPLD_NONE) + { + tree containing_function = TYPE_CONTEXT (TREE_TYPE (lambda)); + tree lambda_stack = tree_cons (NULL_TREE, lambda, NULL_TREE); + tree init = NULL_TREE; + + /* If we are in a lambda function, we can move out until we hit: + 1. a non-lambda function, + 2. a lambda function capturing 'this', or + 3. a non-default capturing lambda function. */ + while (LAMBDA_FUNCTION_P (containing_function)) + { + tree lambda + = CLASSTYPE_LAMBDA_EXPR (DECL_CONTEXT (containing_function)); + + if (LAMBDA_EXPR_THIS_CAPTURE (lambda)) + { + /* An outer lambda has already captured 'this'. */ + tree cap = LAMBDA_EXPR_THIS_CAPTURE (lambda); + init = thisify_lambda_field (cap); + break; + } + + if (LAMBDA_EXPR_DEFAULT_CAPTURE_MODE (lambda) == CPLD_NONE) + /* An outer lambda won't let us capture 'this'. */ + break; + + lambda_stack = tree_cons (NULL_TREE, + lambda, + lambda_stack); + + containing_function = decl_function_context (containing_function); + } + + if (!init && DECL_NONSTATIC_MEMBER_FUNCTION_P (containing_function) + && !LAMBDA_FUNCTION_P (containing_function)) + /* First parameter is 'this'. */ + init = DECL_ARGUMENTS (containing_function); + + if (init) + this_capture = add_default_capture (lambda_stack, + /*id=*/get_identifier ("__this"), + init); + } + + if (!this_capture) + { + error ("% was not captured for this lambda function"); + result = error_mark_node; + } + else + { + /* To make sure that current_class_ref is for the lambda. */ + gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (current_class_ref)) == TREE_TYPE (lambda)); + + result = finish_non_static_data_member (this_capture, + current_class_ref, + /*qualifying_scope=*/NULL_TREE); + + /* If 'this' is captured, each use of 'this' is transformed into an + access to the corresponding unnamed data member of the closure + type cast (_expr.cast_ 5.4) to the type of 'this'. [ The cast + ensures that the transformed expression is an rvalue. ] */ + result = rvalue (result); + } + + return result; +} + +/* Returns the method basetype of the innermost non-lambda function, or + NULL_TREE if none. */ + +tree +nonlambda_method_basetype (void) +{ + tree fn, type; + if (!current_class_ref) + return NULL_TREE; + + type = current_class_type; + if (!LAMBDA_TYPE_P (type)) + return type; + + /* Find the nearest enclosing non-lambda function. 
*/ + fn = TYPE_NAME (type); + do + fn = decl_function_context (fn); + while (fn && LAMBDA_FUNCTION_P (fn)); + + if (!fn || !DECL_NONSTATIC_MEMBER_FUNCTION_P (fn)) + return NULL_TREE; + + return TYPE_METHOD_BASETYPE (TREE_TYPE (fn)); +} + +/* If the closure TYPE has a static op(), also add a conversion to function + pointer. */ + +void +maybe_add_lambda_conv_op (tree type) +{ + bool nested = (current_function_decl != NULL_TREE); + tree callop = lambda_function (type); + tree rettype, name, fntype, fn, body, compound_stmt; + tree thistype, stattype, statfn, convfn, call, arg; + VEC (tree, gc) *argvec; + + if (LAMBDA_EXPR_CAPTURE_LIST (CLASSTYPE_LAMBDA_EXPR (type)) != NULL_TREE) + return; + + stattype = build_function_type (TREE_TYPE (TREE_TYPE (callop)), + FUNCTION_ARG_CHAIN (callop)); + + /* First build up the conversion op. */ + + rettype = build_pointer_type (stattype); + name = mangle_conv_op_name_for_type (rettype); + thistype = cp_build_qualified_type (type, TYPE_QUAL_CONST); + fntype = build_method_type_directly (thistype, rettype, void_list_node); + fn = convfn = build_lang_decl (FUNCTION_DECL, name, fntype); + DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop); + + if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn + && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT) + DECL_ALIGN (fn) = 2 * BITS_PER_UNIT; + + SET_OVERLOADED_OPERATOR_CODE (fn, TYPE_EXPR); + grokclassfn (type, fn, NO_SPECIAL); + set_linkage_according_to_type (type, fn); + rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof); + DECL_IN_AGGR_P (fn) = 1; + DECL_ARTIFICIAL (fn) = 1; + DECL_NOT_REALLY_EXTERN (fn) = 1; + DECL_DECLARED_INLINE_P (fn) = 1; + DECL_ARGUMENTS (fn) = build_this_parm (fntype, TYPE_QUAL_CONST); + if (nested) + DECL_INTERFACE_KNOWN (fn) = 1; + + add_method (type, fn, NULL_TREE); + + /* Generic thunk code fails for varargs; we'll complain in mark_used if + the conversion op is used. */ + if (varargs_function_p (callop)) + { + DECL_DELETED_FN (fn) = 1; + return; + } + + /* Now build up the thunk to be returned. */ + + name = get_identifier ("_FUN"); + fn = statfn = build_lang_decl (FUNCTION_DECL, name, stattype); + DECL_SOURCE_LOCATION (fn) = DECL_SOURCE_LOCATION (callop); + if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn + && DECL_ALIGN (fn) < 2 * BITS_PER_UNIT) + DECL_ALIGN (fn) = 2 * BITS_PER_UNIT; + grokclassfn (type, fn, NO_SPECIAL); + set_linkage_according_to_type (type, fn); + rest_of_decl_compilation (fn, toplevel_bindings_p (), at_eof); + DECL_IN_AGGR_P (fn) = 1; + DECL_ARTIFICIAL (fn) = 1; + DECL_NOT_REALLY_EXTERN (fn) = 1; + DECL_DECLARED_INLINE_P (fn) = 1; + DECL_STATIC_FUNCTION_P (fn) = 1; + DECL_ARGUMENTS (fn) = copy_list (TREE_CHAIN (DECL_ARGUMENTS (callop))); + for (arg = DECL_ARGUMENTS (fn); arg; arg = TREE_CHAIN (arg)) + DECL_CONTEXT (arg) = fn; + if (nested) + DECL_INTERFACE_KNOWN (fn) = 1; + + add_method (type, fn, NULL_TREE); + + if (nested) + push_function_context (); + + /* Generate the body of the thunk. */ + + start_preparsed_function (statfn, NULL_TREE, + SF_PRE_PARSED | SF_INCLASS_INLINE); + if (DECL_ONE_ONLY (statfn)) + { + /* Put the thunk in the same comdat group as the call op. 
*/ + struct cgraph_node *callop_node, *thunk_node; + DECL_COMDAT_GROUP (statfn) = DECL_COMDAT_GROUP (callop); + callop_node = cgraph_node (callop); + thunk_node = cgraph_node (statfn); + gcc_assert (callop_node->same_comdat_group == NULL); + gcc_assert (thunk_node->same_comdat_group == NULL); + callop_node->same_comdat_group = thunk_node; + thunk_node->same_comdat_group = callop_node; + } + body = begin_function_body (); + compound_stmt = begin_compound_stmt (0); + + arg = build1 (NOP_EXPR, TREE_TYPE (DECL_ARGUMENTS (callop)), + null_pointer_node); + argvec = make_tree_vector (); + VEC_quick_push (tree, argvec, arg); + for (arg = DECL_ARGUMENTS (statfn); arg; arg = TREE_CHAIN (arg)) + VEC_safe_push (tree, gc, argvec, arg); + call = build_call_a (callop, VEC_length (tree, argvec), + VEC_address (tree, argvec)); + CALL_FROM_THUNK_P (call) = 1; + if (MAYBE_CLASS_TYPE_P (TREE_TYPE (call))) + call = build_cplus_new (TREE_TYPE (call), call); + call = convert_from_reference (call); + finish_return_stmt (call); + + finish_compound_stmt (compound_stmt); + finish_function_body (body); + + expand_or_defer_fn (finish_function (2)); + + /* Generate the body of the conversion op. */ + + start_preparsed_function (convfn, NULL_TREE, + SF_PRE_PARSED | SF_INCLASS_INLINE); + body = begin_function_body (); + compound_stmt = begin_compound_stmt (0); + + finish_return_stmt (decay_conversion (statfn)); + + finish_compound_stmt (compound_stmt); + finish_function_body (body); + + expand_or_defer_fn (finish_function (2)); + + if (nested) + pop_function_context (); +} #include "gt-cp-semantics.h"
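
For reference, a minimal C++ sketch (not part of the patch; illustrative only, any names are made up) of the lambda forms the routines added above are meant to handle: capture by copy and by reference via a capture-default (add_default_capture), implicit capture of 'this' inside a non-static member function (lambda_expr_this_capture / finish_non_static_data_member), and conversion of a captureless lambda to an ordinary function pointer, which maybe_add_lambda_conv_op models as a conversion operator returning the address of a static thunk.

// lambda-sketch.cc -- illustrative only, not part of gcc/cp/semantics.c
#include <iostream>

struct Counter {
  int n;
  Counter () : n (0) {}

  int peek_next () const
  {
    // [=] inside a member function captures 'this'; the use of 'n' in the
    // lambda body is rewritten as an access through the captured 'this'.
    auto f = [=] () { return n + 1; };
    return f ();
  }
};

int
main ()
{
  int x = 10;

  auto by_copy = [=] () { return x + 1; };  // copies x (value 10) into the closure
  auto by_ref  = [&] () { x = 20; };        // stores a reference to x in the closure

  by_ref ();
  std::cout << by_copy () << ' ' << x << '\n';   // prints "11 20"

  // A lambda with an empty capture list converts to a plain function
  // pointer; the conversion operator forwards to operator() through a
  // static thunk.
  int (*fp) (int) = [] (int i) { return i * i; };
  std::cout << fp (6) << '\n';                   // prints "36"

  Counter c;
  std::cout << c.peek_next () << '\n';           // prints "1"
}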