X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Ftree-sra.c;h=0a9b3df3b15a7400791de62fc98472811e168a7a;hb=b2afff2d97f09cb4c4921f5af82ea52f26b74adb;hp=ebb40c4aa1ad95938a5a71e7f4f0f2d6b3bcf0b9;hpb=54c0af3a2327f757dd3a84075a0bde3d4d0a192f;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index ebb40c4aa1a..0a9b3df3b15 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -77,6 +77,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "alloc-pool.h"
 #include "tm.h"
 #include "tree.h"
+#include "expr.h"
 #include "gimple.h"
 #include "cgraph.h"
 #include "tree-flow.h"
@@ -166,6 +167,10 @@ struct access
   /* Is this particular access a write access? */
   unsigned write : 1;

+  /* Is this access an artificial one created to scalarize some record
+     entirely? */
+  unsigned total_scalarization : 1;
+
   /* Is this access currently in the work queue?  */
   unsigned grp_queued : 1;

@@ -244,6 +249,10 @@ static struct pointer_map_t *base_access_vec;
 /* Bitmap of candidates.  */
 static bitmap candidate_bitmap;

+/* Bitmap of candidates which we should try to entirely scalarize away and
+   those which cannot be (because they are, and need to be, used as a whole).  */
+static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;
+
 /* Obstack for creation of fancy names.  */
 static struct obstack name_obstack;

@@ -258,6 +267,13 @@ static int func_param_count;
    __builtin_apply_args.  */
 static bool encountered_apply_args;

+/* Set by scan_function when it finds a recursive call.  */
+static bool encountered_recursive_call;
+
+/* Set by scan_function when it finds a recursive call with fewer actual
+   arguments than formal parameters.  */
+static bool encountered_unchangable_recursive_call;
+
 /* This is a table in which for each basic block and parameter there is a
    distance (offset + size) in that parameter which is dereferenced and
    accessed in that BB.  */
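A hypothetical source-level illustration of what the two new bitmaps separate
(this example is not part of the patch): an aggregate that is only ever copied
whole in assign statements can be scalarized away entirely, while one that is
also used as a whole in any other way, such as a call argument, cannot.

    /* Hypothetical example, not part of the patch.  */
    struct point { int x; int y; };

    extern int use_whole (struct point);

    int
    example (struct point a)
    {
      struct point b = a;   /* b is copied whole only in assignments, so its
                               DECL_UID lands in should_scalarize_away_bitmap.  */
      struct point c = a;   /* c is also passed to a call as a whole, which
                               sets cannot_scalarize_away_bitmap for it.  */
      return b.x + b.y + use_whole (c);
    }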
@@ -336,18 +352,22 @@ dump_access (FILE *f, struct access *access, bool grp)
   fprintf (f, ", type = ");
   print_generic_expr (f, access->type, 0);
   if (grp)
-    fprintf (f, ", grp_write = %d, grp_read = %d, grp_hint = %d, "
+    fprintf (f, ", grp_write = %d, total_scalarization = %d, "
+             "grp_read = %d, grp_hint = %d, "
              "grp_covered = %d, grp_unscalarizable_region = %d, "
              "grp_unscalarized_data = %d, grp_partial_lhs = %d, "
              "grp_to_be_replaced = %d, grp_maybe_modified = %d, "
              "grp_not_necessarilly_dereferenced = %d\n",
-             access->grp_write, access->grp_read, access->grp_hint,
+             access->grp_write, access->total_scalarization,
+             access->grp_read, access->grp_hint,
              access->grp_covered, access->grp_unscalarizable_region,
              access->grp_unscalarized_data, access->grp_partial_lhs,
              access->grp_to_be_replaced, access->grp_maybe_modified,
              access->grp_not_necessarilly_dereferenced);
   else
-    fprintf (f, ", write = %d, grp_partial_lhs = %d\n", access->write,
+    fprintf (f, ", write = %d, total_scalarization = %d, "
+             "grp_partial_lhs = %d\n",
+             access->write, access->total_scalarization,
              access->grp_partial_lhs);
 }

@@ -539,12 +559,16 @@ static void
 sra_initialize (void)
 {
   candidate_bitmap = BITMAP_ALLOC (NULL);
+  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
+  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
   gcc_obstack_init (&name_obstack);
   access_pool = create_alloc_pool ("SRA accesses", sizeof (struct access), 16);
   link_pool = create_alloc_pool ("SRA links", sizeof (struct assign_link), 16);
   base_access_vec = pointer_map_create ();
   memset (&sra_stats, 0, sizeof (sra_stats));
   encountered_apply_args = false;
+  encountered_recursive_call = false;
+  encountered_unchangable_recursive_call = false;
 }

 /* Hook fed to pointer_map_traverse, deallocate stored vectors.  */
@@ -566,6 +590,8 @@ static void
 sra_deinitialize (void)
 {
   BITMAP_FREE (candidate_bitmap);
+  BITMAP_FREE (should_scalarize_away_bitmap);
+  BITMAP_FREE (cannot_scalarize_away_bitmap);
   free_alloc_pool (access_pool);
   free_alloc_pool (link_pool);
   obstack_free (&name_obstack, NULL);
@@ -676,6 +702,37 @@ mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
   bb_dereferences[idx] = dist;
 }

+/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
+   the three fields.  Also add it to the vector of accesses corresponding to
+   the base.  Finally, return the new access.  */
+
+static struct access *
+create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
+{
+  VEC (access_p, heap) *vec;
+  struct access *access;
+  void **slot;
+
+  access = (struct access *) pool_alloc (access_pool);
+  memset (access, 0, sizeof (struct access));
+  access->base = base;
+  access->offset = offset;
+  access->size = size;
+
+  slot = pointer_map_contains (base_access_vec, base);
+  if (slot)
+    vec = (VEC (access_p, heap) *) *slot;
+  else
+    vec = VEC_alloc (access_p, heap, 32);
+
+  VEC_safe_push (access_p, heap, vec, access);
+
+  *((struct VEC (access_p,heap) **)
+        pointer_map_insert (base_access_vec, base)) = vec;
+
+  return access;
+}
+
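The store-back through pointer_map_insert after VEC_safe_push is needed
because GCC's VECs keep their elements inline, so a push that grows the
vector can move the whole object. A minimal standalone sketch of the same
pattern, using a hypothetical ivec type rather than the real VEC/pointer_map
API:

    #include <stdlib.h>
    #include <string.h>

    /* Elements are stored inline (C99 flexible array member), so growing
       the vector can relocate it, just like GCC's VEC.  */
    struct ivec
    {
      size_t len, cap;
      int items[];
    };

    static struct ivec *
    ivec_push (struct ivec *v, int value)
    {
      if (!v || v->len == v->cap)
        {
          size_t cap = v ? 2 * v->cap : 8;
          struct ivec *n = malloc (sizeof (*n) + cap * sizeof (int));
          n->len = v ? v->len : 0;
          n->cap = cap;
          if (v)
            {
              memcpy (n->items, v->items, v->len * sizeof (int));
              free (v);
            }
          v = n;
        }
      v->items[v->len++] = value;
      return v;   /* may differ from the pointer passed in, so the caller
                     must store it back -- as create_access_1 does.  */
    }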
 /* Create and insert access for EXPR. Return created access, or NULL if it is
    not possible.  */

@@ -683,8 +740,6 @@ static struct access *
 create_access (tree expr, gimple stmt, bool write)
 {
   struct access *access;
-  void **slot;
-  VEC (access_p,heap) *vec;
   HOST_WIDE_INT offset, size, max_size;
   tree base = expr;
   bool ptr, unscalarizable_region = false;
@@ -735,30 +790,87 @@ create_access (tree expr, gimple stmt, bool write)
 	}
     }

-  access = (struct access *) pool_alloc (access_pool);
-  memset (access, 0, sizeof (struct access));
-
-  access->base = base;
-  access->offset = offset;
-  access->size = size;
+  access = create_access_1 (base, offset, size);
   access->expr = expr;
   access->type = TREE_TYPE (expr);
   access->write = write;
   access->grp_unscalarizable_region = unscalarizable_region;
   access->stmt = stmt;

-  slot = pointer_map_contains (base_access_vec, base);
-  if (slot)
-    vec = (VEC (access_p, heap) *) *slot;
-  else
-    vec = VEC_alloc (access_p, heap, 32);
+  return access;
+}

-  VEC_safe_push (access_p, heap, vec, access);

-  *((struct VEC (access_p,heap) **)
-        pointer_map_insert (base_access_vec, base)) = vec;
+/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
+   register types or (recursively) records with only these two kinds of fields.
+   It also returns false if any of these records has a zero-size field as its
+   last field.  */

-  return access;
+static bool
+type_consists_of_records_p (tree type)
+{
+  tree fld;
+  bool last_fld_has_zero_size = false;
+
+  if (TREE_CODE (type) != RECORD_TYPE)
+    return false;
+
+  for (fld = TYPE_FIELDS (type); fld; fld = TREE_CHAIN (fld))
+    if (TREE_CODE (fld) == FIELD_DECL)
+      {
+        tree ft = TREE_TYPE (fld);
+
+        if (!is_gimple_reg_type (ft)
+            && !type_consists_of_records_p (ft))
+          return false;
+
+        last_fld_has_zero_size = tree_low_cst (DECL_SIZE (fld), 1) == 0;
+      }
+
+  if (last_fld_has_zero_size)
+    return false;
+
+  return true;
+}
+
+/* Create total_scalarization accesses for all scalar type fields in DECL,
+   which must be of a RECORD_TYPE conforming to type_consists_of_records_p.
+   BASE must be the top-most VAR_DECL representing the variable, OFFSET must
+   be the offset of DECL within BASE.  */
+
+static void
+completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
+{
+  tree fld, decl_type = TREE_TYPE (decl);
+
+  for (fld = TYPE_FIELDS (decl_type); fld; fld = TREE_CHAIN (fld))
+    if (TREE_CODE (fld) == FIELD_DECL)
+      {
+        HOST_WIDE_INT pos = offset + int_bit_position (fld);
+        tree ft = TREE_TYPE (fld);
+
+        if (is_gimple_reg_type (ft))
+          {
+            struct access *access;
+            HOST_WIDE_INT size;
+            tree expr;
+            bool ok;
+
+            size = tree_low_cst (DECL_SIZE (fld), 1);
+            expr = base;
+            ok = build_ref_for_offset (&expr, TREE_TYPE (base), pos,
+                                       ft, false);
+            gcc_assert (ok);
+
+            access = create_access_1 (base, pos, size);
+            access->expr = expr;
+            access->type = ft;
+            access->total_scalarization = 1;
+            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
+          }
+        else
+          completely_scalarize_record (base, fld, pos);
+      }
+}
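Hypothetical examples (not from the patch) of what type_consists_of_records_p
accepts and rejects; the zero-size case assumes GNU C, where an empty struct
has size zero:

    struct ok_inner { int i; double d; };
    struct ok { struct ok_inner in; long l; };   /* accepted: nested records
                                                    of register types */

    struct has_array { int a[4]; };              /* rejected: array field is
                                                    neither a register type
                                                    nor a record */

    struct empty { };                            /* zero-sized in GNU C */
    struct bad_tail { int i; struct empty e; };  /* rejected: zero-size last
                                                    field */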
@@ -851,7 +963,19 @@ build_access_from_expr (tree *expr_ptr,
 			gimple_stmt_iterator *gsi ATTRIBUTE_UNUSED, bool write,
 			void *data ATTRIBUTE_UNUSED)
 {
-  return build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write) != NULL;
+  struct access *access;
+
+  access = build_access_from_expr_1 (expr_ptr, gsi_stmt (*gsi), write);
+  if (access)
+    {
+      /* This means the aggregate is accessed as a whole in a way other than
+	 an assign statement and thus cannot be removed even if we had a
+	 scalar replacement for everything.  */
+      if (cannot_scalarize_away_bitmap)
+	bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
+      return true;
+    }
+  return false;
 }

 /* Disqualify LHS and RHS for scalarization if STMT must end its basic block in
@@ -907,6 +1031,10 @@ build_accesses_from_assign (gimple *stmt_ptr,
   racc = build_access_from_expr_1 (rhs_ptr, stmt, false);
   lacc = build_access_from_expr_1 (lhs_ptr, stmt, true);

+  if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
+      && racc && !is_gimple_reg_type (racc->type))
+    bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
+
   if (lacc && racc
       && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
       && !lacc->grp_unscalarizable_region
@@ -938,12 +1066,22 @@ static bool
 asm_visit_addr (gimple stmt ATTRIBUTE_UNUSED, tree op,
 		void *data ATTRIBUTE_UNUSED)
 {
-  if (DECL_P (op))
+  op = get_base_address (op);
+  if (op
+      && DECL_P (op))
     disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

   return false;
 }

+/* Return true iff callsite CALL has at least as many actual arguments as there
+   are formal parameters of the function currently processed by IPA-SRA.  */
+
+static inline bool
+callsite_has_enough_arguments_p (gimple call)
+{
+  return gimple_call_num_args (call) >= (unsigned) func_param_count;
+}

 /* Scan function and look for interesting statements. Return true if any has
    been found or processed, as indicated by callbacks.  SCAN_EXPR is a callback
@@ -1014,15 +1152,24 @@ scan_function (bool (*scan_expr) (tree *, gimple_stmt_iterator *, bool, void *),
 		  any |= scan_expr (argp, &gsi, false, data);
 		}

-	      if (analysis_stage)
+	      if (analysis_stage && sra_mode == SRA_MODE_EARLY_IPA)
 		{
 		  tree dest = gimple_call_fndecl (stmt);
 		  int flags = gimple_call_flags (stmt);

-		  if (dest
-		      && DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
-		      && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
-		    encountered_apply_args = true;
+		  if (dest)
+		    {
+		      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
+			  && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
+			encountered_apply_args = true;
+		      if (cgraph_get_node (dest)
+			  == cgraph_get_node (current_function_decl))
+			{
+			  encountered_recursive_call = true;
+			  if (!callsite_has_enough_arguments_p (stmt))
+			    encountered_unchangable_recursive_call = true;
+			}
+		    }

 		  if (final_bbs
 		      && (flags & (ECF_CONST | ECF_PURE)) == 0)
@@ -1262,7 +1409,12 @@ build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
 	    if (!tr_size || !host_integerp (tr_size, 1))
 	      continue;
 	    size = tree_low_cst (tr_size, 1);
-	    if (pos > offset || (pos + size) <= offset)
+	    if (size == 0)
+	      {
+		if (pos != offset)
+		  continue;
+	      }
+	    else if (pos > offset || (pos + size) <= offset)
 	      continue;

 	    if (res)
@@ -1290,7 +1442,7 @@ build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
 	  el_size = tree_low_cst (tr_size, 1);

 	  minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
-	  if (TREE_CODE (minidx) != INTEGER_CST)
+	  if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
 	    return false;
 	  if (res)
 	    {
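The "enough arguments" checks exist because pre-ANSI C permits call sites
that pass fewer actual arguments than the callee's formals, in which case the
parameter list cannot be rewritten safely. A hypothetical example of the
situation the new flags detect (not from the patch):

    /* The old-style declaration `int f ();' says nothing about parameters,
       so the short call in g is accepted by the compiler.  */
    int f ();

    int
    g (void)
    {
      return f (1);        /* one actual argument...  */
    }

    int
    f (a, b)
         int a, b;         /* ...but two formals: callsite_has_enough_arguments_p
                              is false for the call in g.  */
    {
      return b ? f (a) : a;  /* recursive call, also with too few arguments,
                                which sets
                                encountered_unchangable_recursive_call.  */
    }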
@@ -1429,6 +1581,7 @@ sort_and_splice_var_accesses (tree var)
       bool grp_write = access->write;
       bool grp_read = !access->write;
       bool multiple_reads = false;
+      bool total_scalarization = access->total_scalarization;
       bool grp_partial_lhs = access->grp_partial_lhs;
       bool first_scalar = is_gimple_reg_type (access->type);
       bool unscalarizable_region = access->grp_unscalarizable_region;
@@ -1462,6 +1615,7 @@ sort_and_splice_var_accesses (tree var)
 	    }
 	  grp_partial_lhs |= ac2->grp_partial_lhs;
 	  unscalarizable_region |= ac2->grp_unscalarizable_region;
+	  total_scalarization |= ac2->total_scalarization;
 	  relink_to_new_repr (access, ac2);

 	  /* If there are both aggregate-type and scalar-type accesses with
@@ -1477,7 +1631,7 @@ sort_and_splice_var_accesses (tree var)
       access->group_representative = access;
       access->grp_write = grp_write;
       access->grp_read = grp_read;
-      access->grp_hint = multiple_reads;
+      access->grp_hint = multiple_reads || total_scalarization;
       access->grp_partial_lhs = grp_partial_lhs;
       access->grp_unscalarizable_region = unscalarizable_region;
       if (access->first_link)
@@ -1512,6 +1666,7 @@ create_access_replacement (struct access *access)

   DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
   DECL_ARTIFICIAL (repl) = 1;
+  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

   if (DECL_NAME (access->base)
       && !DECL_IGNORED_P (access->base)
@@ -1524,11 +1679,10 @@ create_access_replacement (struct access *access)
       SET_DECL_DEBUG_EXPR (repl, access->expr);
       DECL_DEBUG_EXPR_IS_FROM (repl) = 1;
-      DECL_IGNORED_P (repl) = 0;
+      TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
     }
-
-  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);
-  TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
+  else
+    TREE_NO_WARNING (repl) = 1;

   if (dump_file)
     {
@@ -1659,7 +1813,13 @@ analyze_access_subtree (struct access *root, bool allow_replacements,

   if (allow_replacements && scalar && !root->first_child
       && (root->grp_hint
-	  || (direct_read && root->grp_write)))
+	  || (direct_read && root->grp_write))
+      /* We must not ICE later on when trying to build an access to the
+	 original data within the aggregate even when it is impossible to do
+	 in a defined way like in the PR 42703 testcase.  Therefore we check
+	 pre-emptively here that we will be able to do that.  */
+      && build_ref_for_offset (NULL, TREE_TYPE (root->base), root->offset,
+			       root->type, false))
     {
       if (dump_file && (dump_flags & TDF_DETAILS))
 	{
@@ -1881,7 +2041,31 @@ analyze_all_variable_accesses (void)
   int res = 0;
   bitmap tmp = BITMAP_ALLOC (NULL);
   bitmap_iterator bi;
-  unsigned i;
+  unsigned i, max_total_scalarization_size;
+
+  max_total_scalarization_size = UNITS_PER_WORD * BITS_PER_UNIT
+    * MOVE_RATIO (optimize_function_for_speed_p (cfun));
+
+  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
+    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
+	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
+      {
+	tree var = referenced_var (i);
+
+	if (TREE_CODE (var) == VAR_DECL
+	    && ((unsigned) tree_low_cst (TYPE_SIZE (TREE_TYPE (var)), 1)
+		<= max_total_scalarization_size)
+	    && type_consists_of_records_p (TREE_TYPE (var)))
+	  {
+	    completely_scalarize_record (var, var, 0);
+	    if (dump_file && (dump_flags & TDF_DETAILS))
+	      {
+		fprintf (dump_file, "Will attempt to totally scalarize ");
+		print_generic_expr (dump_file, var, 0);
+		fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
+	      }
+	  }
+      }

   bitmap_copy (tmp, candidate_bitmap);
   EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
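The cap computed above is measured in bits. As a hypothetical worked example
(the constants vary by target): with UNITS_PER_WORD == 8, BITS_PER_UNIT == 8
and MOVE_RATIO evaluating to 4, max_total_scalarization_size is
8 * 8 * 4 = 256 bits, so only records of up to 32 bytes are totally
scalarized. Reusing MOVE_RATIO ties the heuristic to the threshold the target
already uses when deciding between an inline move sequence and a call to
memcpy.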
@@ -2344,6 +2528,37 @@ sra_modify_constructor_assign (gimple *stmt, gimple_stmt_iterator *gsi)
     }
 }

+/* Create a new suitable default definition SSA_NAME and replace all uses of
+   SSA with it.  */
+
+static void
+replace_uses_with_default_def_ssa_name (tree ssa)
+{
+  tree repl, decl = SSA_NAME_VAR (ssa);
+  if (TREE_CODE (decl) == PARM_DECL)
+    {
+      tree tmp = create_tmp_var (TREE_TYPE (decl), "SR");
+      if (TREE_CODE (TREE_TYPE (tmp)) == COMPLEX_TYPE
+	  || TREE_CODE (TREE_TYPE (tmp)) == VECTOR_TYPE)
+	DECL_GIMPLE_REG_P (tmp) = 1;
+
+      get_var_ann (tmp);
+      add_referenced_var (tmp);
+      repl = make_ssa_name (tmp, gimple_build_nop ());
+      set_default_def (tmp, repl);
+    }
+  else
+    {
+      repl = gimple_default_def (cfun, decl);
+      if (!repl)
+	{
+	  repl = make_ssa_name (decl, gimple_build_nop ());
+	  set_default_def (decl, repl);
+	}
+    }
+
+  replace_uses_by (ssa, repl);
+}

 /* Callback of scan_function to process assign statements.  It examines both
    sides of the statement, replaces them with a scalar replacement if there is
@@ -2361,6 +2576,7 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
   bool modify_this_stmt = false;
   bool force_gimple_rhs = false;
   location_t loc = gimple_location (*stmt);
+  gimple_stmt_iterator orig_gsi = *gsi;

   if (!gimple_assign_single_p (*stmt))
     return SRA_SA_NONE;
@@ -2439,15 +2655,6 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
 	  force_gimple_rhs = true;
 	}
     }
-
-  if (force_gimple_rhs)
-    rhs = force_gimple_operand_gsi (gsi, rhs, true, NULL_TREE,
-				    true, GSI_SAME_STMT);
-  if (gimple_assign_rhs1 (*stmt) != rhs)
-    {
-      gimple_assign_set_rhs_from_tree (gsi, rhs);
-      gcc_assert (*stmt == gsi_stmt (*gsi));
-    }

   /* From this point on, the function deals with assignments in between
@@ -2483,7 +2690,9 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
      there to do the copying and then load the scalar replacements of the LHS.
      This is what the first branch does.  */

-  if (contains_view_convert_expr_p (rhs) || contains_view_convert_expr_p (lhs)
+  if (gimple_has_volatile_ops (*stmt)
+      || contains_view_convert_expr_p (rhs)
+      || contains_view_convert_expr_p (lhs)
       || (access_has_children_p (racc)
 	  && !ref_expr_for_all_replacements_p (racc, lhs, racc->offset))
       || (access_has_children_p (lacc)
@@ -2525,28 +2734,44 @@ sra_modify_assign (gimple *stmt, gimple_stmt_iterator *gsi,
     }
   else
     {
-      if (access_has_children_p (racc))
+      if (racc)
 	{
-	  if (!racc->grp_unscalarized_data)
+	  if (!racc->grp_to_be_replaced && !racc->grp_unscalarized_data)
 	    {
-	      generate_subtree_copies (racc->first_child, lhs,
-				       racc->offset, 0, 0, gsi,
-				       false, false);
+	      if (racc->first_child)
+		generate_subtree_copies (racc->first_child, lhs,
+					 racc->offset, 0, 0, gsi,
+					 false, false);
 	      gcc_assert (*stmt == gsi_stmt (*gsi));
+	      if (TREE_CODE (lhs) == SSA_NAME)
+		replace_uses_with_default_def_ssa_name (lhs);
+
 	      unlink_stmt_vdef (*stmt);
 	      gsi_remove (gsi, true);
 	      sra_stats.deleted++;
 	      return SRA_SA_REMOVED;
 	    }
-	  else
+	  else if (racc->first_child)
 	    generate_subtree_copies (racc->first_child, lhs,
 				     racc->offset, 0, 0, gsi, false, true);
 	}
-      else if (access_has_children_p (lacc))
+      if (access_has_children_p (lacc))
 	generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
 				 0, 0, gsi, true, true);
     }
+
+  /* This gimplification must be done after generate_subtree_copies, lest we
+     insert the subtree copies in the middle of the gimplified sequence.  */
+  if (force_gimple_rhs)
+    rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
+				    true, GSI_SAME_STMT);
+  if (gimple_assign_rhs1 (*stmt) != rhs)
+    {
+      gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
+      gcc_assert (*stmt == gsi_stmt (orig_gsi));
+    }
+
   return modify_this_stmt ? SRA_SA_PROCESSED : SRA_SA_NONE;
 }
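Roughly, at the source level, the branch above that deletes the statement
corresponds to the following hypothetical case (the p$a style names only
mimic what SRA's dump output looks like; this example is not part of the
patch):

    struct pair { int a; int b; };

    int
    example (void)
    {
      struct pair p, q;
      p.a = 1;
      p.b = 2;
      q = p;           /* with p fully scalarized into p$a and p$b this
                          becomes q$a = p$a; q$b = p$b; and, since no
                          unscalarized data remains in p, the aggregate
                          q = p statement itself is removed
                          (sra_stats.deleted++).  */
      return q.a + q.b;
    }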
@@ -2725,42 +2950,51 @@ ptr_parm_has_direct_uses (tree parm)

   FOR_EACH_IMM_USE_STMT (stmt, ui, name)
     {
-      if (gimple_assign_single_p (stmt))
+      int uses_ok = 0;
+      use_operand_p use_p;
+
+      if (is_gimple_debug (stmt))
+	continue;
+
+      /* Valid uses include dereferences on the lhs and the rhs.  */
+      if (gimple_has_lhs (stmt))
 	{
-	  tree rhs = gimple_assign_rhs1 (stmt);
-	  if (rhs == name)
-	    ret = true;
-	  else if (TREE_CODE (rhs) == ADDR_EXPR)
-	    {
-	      do
-		{
-		  rhs = TREE_OPERAND (rhs, 0);
-		}
-	      while (handled_component_p (rhs));
-	      if (INDIRECT_REF_P (rhs) && TREE_OPERAND (rhs, 0) == name)
-		ret = true;
-	    }
+	  tree lhs = gimple_get_lhs (stmt);
+	  while (handled_component_p (lhs))
+	    lhs = TREE_OPERAND (lhs, 0);
+	  if (INDIRECT_REF_P (lhs)
+	      && TREE_OPERAND (lhs, 0) == name)
+	    uses_ok++;
 	}
-      else if (gimple_code (stmt) == GIMPLE_RETURN)
+      if (gimple_assign_single_p (stmt))
 	{
-	  tree t = gimple_return_retval (stmt);
-	  if (t == name)
-	    ret = true;
+	  tree rhs = gimple_assign_rhs1 (stmt);
+	  while (handled_component_p (rhs))
+	    rhs = TREE_OPERAND (rhs, 0);
+	  if (INDIRECT_REF_P (rhs)
+	      && TREE_OPERAND (rhs, 0) == name)
+	    uses_ok++;
 	}
       else if (is_gimple_call (stmt))
 	{
 	  unsigned i;
-	  for (i = 0; i < gimple_call_num_args (stmt); i++)
+	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
 	    {
 	      tree arg = gimple_call_arg (stmt, i);
-	      if (arg == name)
-		{
-		  ret = true;
-		  break;
-		}
+	      while (handled_component_p (arg))
+		arg = TREE_OPERAND (arg, 0);
+	      if (INDIRECT_REF_P (arg)
+		  && TREE_OPERAND (arg, 0) == name)
+		uses_ok++;
 	    }
 	}
-      else if (!is_gimple_debug (stmt))
+
+      /* If the number of valid uses does not match the number of
+         uses in this stmt, there is an unhandled use.  */
+      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
+	--uses_ok;
+
+      if (uses_ok != 0)
 	ret = true;

       if (ret)
@@ -2792,7 +3026,7 @@ find_param_candidates (void)

       if (TREE_THIS_VOLATILE (parm)
 	  || TREE_ADDRESSABLE (parm)
-	  || is_va_list_type (type))
+	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
 	continue;

       if (is_unused_scalar_param (parm))
@@ -3704,8 +3938,20 @@ sra_ipa_modify_assign (gimple *stmt_ptr, gimple_stmt_iterator *gsi, void *data)
       tree new_rhs = NULL_TREE;

       if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
-	new_rhs = fold_build1_loc (gimple_location (stmt), VIEW_CONVERT_EXPR,
-				   TREE_TYPE (*lhs_p), *rhs_p);
+	{
+	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
+	    {
+	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
+	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
+		*rhs_p = fold_convert (TREE_TYPE (*lhs_p), integer_zero_node);
+	      else
+		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), 0);
+	    }
+	  else
+	    new_rhs = fold_build1_loc (gimple_location (stmt),
+				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
+				       *rhs_p);
+	}
       else if (REFERENCE_CLASS_P (*rhs_p)
 	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
 	       && !is_gimple_reg (*lhs_p))
@@ -3760,6 +4006,21 @@ sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
     }
 }

+/* Return true iff all callers have at least as many actual arguments as there
+   are formal parameters in the current function.  */
+
+static bool
+all_callers_have_enough_arguments_p (struct cgraph_node *node)
+{
+  struct cgraph_edge *cs;
+  for (cs = node->callers; cs; cs = cs->next_caller)
+    if (!callsite_has_enough_arguments_p (cs->call_stmt))
+      return false;
+
+  return true;
+}
+
+
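A hypothetical illustration of the distinction the rewritten
ptr_parm_has_direct_uses draws (not from the patch): only dereferences count
as valid uses, while any use of the pointer value itself disqualifies the
parameter:

    int *global;

    int
    deref_only (int *p)
    {
      return *p + 1;    /* only dereferences: p has no direct uses, so
                           IPA-SRA may replace it with a by-value scalar.  */
    }

    int
    escapes (int *p)
    {
      global = p;       /* direct use of the pointer value itself:
                           ptr_parm_has_direct_uses returns true and p is
                           not a candidate.  */
      return *p;
    }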
 /* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */

 static void
@@ -3795,6 +4056,10 @@ convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)

   BITMAP_FREE (recomputed_callers);
   current_function_decl = old_cur_fndecl;
+
+  if (!encountered_recursive_call)
+    return;
+
   FOR_EACH_BB (this_block)
     {
       gimple_stmt_iterator gsi;
@@ -3818,6 +4083,26 @@ convert_callers (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
   return;
 }

+/* Create an abstract origin declaration for OLD_DECL and make it an abstract
+   origin of the provided decl so that there are preserved parameters for
+   debug information.  */
+
+static void
+create_abstract_origin (tree old_decl)
+{
+  if (!DECL_ABSTRACT_ORIGIN (old_decl))
+    {
+      tree new_decl = copy_node (old_decl);
+
+      DECL_ABSTRACT (new_decl) = 1;
+      SET_DECL_ASSEMBLER_NAME (new_decl, NULL_TREE);
+      SET_DECL_RTL (new_decl, NULL);
+      DECL_STRUCT_FUNCTION (new_decl) = NULL;
+      DECL_ARTIFICIAL (old_decl) = 1;
+      DECL_ABSTRACT_ORIGIN (old_decl) = new_decl;
+    }
+}
+
 /* Perform all the modifications required in IPA-SRA for NODE to have
    parameters as given in ADJUSTMENTS.  */

@@ -3829,6 +4114,7 @@ modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
     ipa_modify_formal_parameters (alias->decl, adjustments, "ISRA");
   /* current_function_decl must be handled last, after same_body aliases,
      as following functions will use what it computed.  */
+  create_abstract_origin (current_function_decl);
   ipa_modify_formal_parameters (current_function_decl, adjustments, "ISRA");
   scan_function (sra_ipa_modify_expr, sra_ipa_modify_assign,
 		 replace_removed_params_ssa_names, false, adjustments);
@@ -3907,6 +4193,14 @@ ipa_early_sra (void)
       goto simple_out;
     }

+  if (!all_callers_have_enough_arguments_p (node))
+    {
+      if (dump_file)
+	fprintf (dump_file, "There are callers with insufficient number of "
+		 "arguments.\n");
+      goto simple_out;
+    }
+
   bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
 			      func_param_count
 			      * last_basic_block_for_function (cfun));
@@ -3921,6 +4215,14 @@ ipa_early_sra (void)
       goto out;
     }

+  if (encountered_unchangable_recursive_call)
+    {
+      if (dump_file)
+	fprintf (dump_file, "Function calls itself with insufficient "
+		 "number of arguments.\n");
+      goto out;
+    }
+
   adjustments = analyze_all_param_acesses ();
   if (!adjustments)
     goto out;