X-Git-Url: http://git.sourceforge.jp/view?a=blobdiff_plain;f=gcc%2Fcfgexpand.c;h=3a36ee1794460077785c4445780fb5403b5090c2;hb=09c759aaf6da911aa1a44898d2b84121991bced9;hp=a0d4ba52b12ff24e7409bb916c22b37c741a7e4a;hpb=67817f0fb1a998e7fc4acd8229e128b4d0e5dc89;p=pf3gnuchains%2Fgcc-fork.git

diff --git a/gcc/cfgexpand.c b/gcc/cfgexpand.c
index a0d4ba52b12..3a36ee17944 100644
--- a/gcc/cfgexpand.c
+++ b/gcc/cfgexpand.c
@@ -36,6 +36,8 @@ along with GCC; see the file COPYING3. If not see
 #include "except.h"
 #include "flags.h"
 #include "diagnostic.h"
+#include "tree-pretty-print.h"
+#include "gimple-pretty-print.h"
 #include "toplev.h"
 #include "debug.h"
 #include "params.h"
@@ -43,7 +45,8 @@ along with GCC; see the file COPYING3. If not see
 #include "value-prof.h"
 #include "target.h"
 #include "ssaexpand.h"
-
+#include "bitmap.h"
+#include "sbitmap.h"
 
 /* This variable holds information helping the rewriting of SSA trees
    into RTL.  */
@@ -77,8 +80,12 @@ gimple_assign_rhs_to_tree (gimple stmt)
     {
       t = gimple_assign_rhs1 (stmt);
       /* Avoid modifying this tree in place below.  */
-      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
-          && gimple_location (stmt) != EXPR_LOCATION (t))
+      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
+           && gimple_location (stmt) != EXPR_LOCATION (t))
+          || (gimple_block (stmt)
+              && currently_expanding_to_rtl
+              && EXPR_P (t)
+              && gimple_block (stmt) != TREE_BLOCK (t)))
         t = copy_node (t);
     }
   else
@@ -86,51 +93,13 @@ gimple_assign_rhs_to_tree (gimple stmt)
 
   if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
     SET_EXPR_LOCATION (t, gimple_location (stmt));
+  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
+    TREE_BLOCK (t) = gimple_block (stmt);
 
   return t;
 }
 
-/* Verify that there is exactly single jump instruction since last and attach
-   REG_BR_PROB note specifying probability.
-   ??? We really ought to pass the probability down to RTL expanders and let it
-   re-distribute it when the conditional expands into multiple conditionals.
-   This is however difficult to do.  */
-void
-add_reg_br_prob_note (rtx last, int probability)
-{
-  if (profile_status == PROFILE_ABSENT)
-    return;
-  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
-    if (JUMP_P (last))
-      {
-        /* It is common to emit condjump-around-jump sequence when we don't know
-           how to reverse the conditional.  Special case this.  */
-        if (!any_condjump_p (last)
-            || !JUMP_P (NEXT_INSN (last))
-            || !simplejump_p (NEXT_INSN (last))
-            || !NEXT_INSN (NEXT_INSN (last))
-            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
-            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
-            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
-            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
-          goto failed;
-        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
-        add_reg_note (last, REG_BR_PROB,
-                      GEN_INT (REG_BR_PROB_BASE - probability));
-        return;
-      }
-  if (!last || !JUMP_P (last) || !any_condjump_p (last))
-    goto failed;
-  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
-  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
-  return;
-failed:
-  if (dump_file)
-    fprintf (dump_file, "Failed to add probability note\n");
-}
-
-
 #ifndef STACK_ALIGNMENT_NEEDED
 #define STACK_ALIGNMENT_NEEDED 1
 #endif
@@ -534,12 +503,12 @@ update_alias_info_with_stack_vars (void)
       for (j = i; j != EOC; j = stack_vars[j].next)
         {
           tree decl = stack_vars[j].decl;
-          unsigned int uid = DECL_UID (decl);
+          unsigned int uid = DECL_PT_UID (decl);
           /* We should never end up partitioning SSA names (though they
              may end up on the stack).  Neither should we allocate stack
              space to something that is unused and thus unreferenced.  */
           gcc_assert (DECL_P (decl)
-                      && referenced_var_lookup (uid));
+                      && referenced_var_lookup (DECL_UID (decl)));
           bitmap_set_bit (part, uid);
           *((bitmap *) pointer_map_insert (decls_to_partitions,
                                            (void *)(size_t) uid)) = part;
@@ -549,7 +518,7 @@ update_alias_info_with_stack_vars (void)
 
       /* Make the SSA name point to all partition members.  */
       pi = get_ptr_info (name);
-      pt_solution_set (&pi->pt, part);
+      pt_solution_set (&pi->pt, part, false, false);
     }
 
   /* Make all points-to sets that contain one member of a partition
@@ -574,8 +543,6 @@ update_alias_info_with_stack_vars (void)
 
       add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                      decls_to_partitions, visited, temp);
-      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
-                                     decls_to_partitions, visited, temp);
 
       pointer_set_destroy (visited);
       pointer_map_destroy (decls_to_partitions);
@@ -1323,6 +1290,7 @@ static void
 expand_used_vars (void)
 {
   tree t, next, outer_block = DECL_INITIAL (current_function_decl);
+  tree maybe_local_decls = NULL_TREE;
   unsigned i;
 
   /* Compute the phase of the stack frame for this function.  */
@@ -1371,8 +1339,7 @@ expand_used_vars (void)
       if (is_gimple_reg (var))
         {
           TREE_USED (var) = 0;
-          ggc_free (t);
-          continue;
+          goto next;
         }
       /* We didn't set a block for static or extern because it's hard
          to tell the difference between a global variable (re)declared
@@ -1393,20 +1360,29 @@
         TREE_USED (var) = 1;
 
       if (expand_now)
+        expand_one_var (var, true, true);
+
+    next:
+      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
         {
-          expand_one_var (var, true, true);
-          if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
-            {
-              rtx rtl = DECL_RTL_IF_SET (var);
+          rtx rtl = DECL_RTL_IF_SET (var);
 
-              /* Keep artificial non-ignored vars in cfun->local_decls
-                 chain until instantiate_decls.  */
-              if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
-                {
-                  TREE_CHAIN (t) = cfun->local_decls;
-                  cfun->local_decls = t;
-                  continue;
-                }
+          /* Keep artificial non-ignored vars in cfun->local_decls
+             chain until instantiate_decls.  */
+          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
+            {
+              TREE_CHAIN (t) = cfun->local_decls;
+              cfun->local_decls = t;
+              continue;
+            }
+          else if (rtl == NULL_RTX)
+            {
+              /* If rtl isn't set yet, which can happen e.g. with
+                 -fstack-protector, retry before returning from this
+                 function.  */
+              TREE_CHAIN (t) = maybe_local_decls;
+              maybe_local_decls = t;
+              continue;
+            }
         }
 
@@ -1466,6 +1442,28 @@ expand_used_vars (void)
       fini_vars_expansion ();
     }
 
+  /* If there were any artificial non-ignored vars without rtl
+     found earlier, see if deferred stack allocation hasn't assigned
+     rtl to them.  */
+  for (t = maybe_local_decls; t; t = next)
+    {
+      tree var = TREE_VALUE (t);
+      rtx rtl = DECL_RTL_IF_SET (var);
+
+      next = TREE_CHAIN (t);
+
+      /* Keep artificial non-ignored vars in cfun->local_decls
+         chain until instantiate_decls.  */
+      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
+        {
+          TREE_CHAIN (t) = cfun->local_decls;
+          cfun->local_decls = t;
+          continue;
+        }
+
+      ggc_free (t);
+    }
+
   /* If the target requires that FRAME_OFFSET be aligned, do it.  */
   if (STACK_ALIGNMENT_NEEDED)
     {
@@ -1663,8 +1661,8 @@ expand_gimple_cond (basic_block bb, gimple stmt)
      two-way jump that needs to be decomposed into two basic blocks.  */
   if (false_edge->dest == bb->next_bb)
     {
-      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
-      add_reg_br_prob_note (last, true_edge->probability);
+      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
+                true_edge->probability);
       maybe_dump_rtl_for_gimple_stmt (stmt, last);
       if (true_edge->goto_locus)
         {
@@ -1679,8 +1677,8 @@ expand_gimple_cond (basic_block bb, gimple stmt)
     }
   if (true_edge->dest == bb->next_bb)
     {
-      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest));
-      add_reg_br_prob_note (last, false_edge->probability);
+      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
+                   false_edge->probability);
       maybe_dump_rtl_for_gimple_stmt (stmt, last);
       if (false_edge->goto_locus)
         {
@@ -1694,8 +1692,8 @@ expand_gimple_cond (basic_block bb, gimple stmt)
       return NULL;
     }
 
-  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest));
-  add_reg_br_prob_note (last, true_edge->probability);
+  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
+            true_edge->probability);
   last = get_last_insn ();
   if (false_edge->goto_locus)
     {
@@ -2239,6 +2237,7 @@ expand_debug_expr (tree exp)
   switch (TREE_CODE (exp))
     {
     case COND_EXPR:
+    case DOT_PROD_EXPR:
      goto ternary;
 
     case TRUTH_ANDIF_EXPR:
@@ -2347,8 +2346,7 @@ expand_debug_expr (tree exp)
          || mode == VOIDmode)
        return NULL;
 
-      op0 = DECL_RTL (exp);
-      SET_DECL_RTL (exp, NULL);
+      op0 = make_decl_rtl_for_debug (exp);
       if (!MEM_P (op0)
          || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
          || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
@@ -2357,7 +2355,12 @@
       else
        op0 = copy_rtx (op0);
 
-      if (GET_MODE (op0) == BLKmode)
+      if (GET_MODE (op0) == BLKmode
+          /* If op0 is not BLKmode, but BLKmode is, adjust_mode
+             below would ICE.  While it is likely a FE bug,
+             try to be robust here.  See PR43166.  */
+          || mode == BLKmode
+          || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
        {
          gcc_assert (MEM_P (op0));
          op0 = adjust_address_nv (op0, mode, 0);
@@ -2378,7 +2381,10 @@
        if (inner_mode == VOIDmode)
          {
-           inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+           if (TREE_CODE (exp) == SSA_NAME)
+             inner_mode = TYPE_MODE (TREE_TYPE (exp));
+           else
+             inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
            if (mode == inner_mode)
              return op0;
          }
@@ -2394,6 +2400,7 @@
          }
        else if (FLOAT_MODE_P (mode))
          {
+           gcc_assert (TREE_CODE (exp) != SSA_NAME);
            if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
              op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
            else
@@ -2496,7 +2503,8 @@
          {
            enum machine_mode addrmode, offmode;
 
-           gcc_assert (MEM_P (op0));
+           if (!MEM_P (op0))
+             return NULL;
            op0 = XEXP (op0, 0);
            addrmode = GET_MODE (op0);
@@ -2556,13 +2564,14 @@
        if (bitpos < 0)
          return NULL;
 
+       if (GET_MODE (op0) == BLKmode)
+         return NULL;
+
        if ((bitpos % BITS_PER_UNIT) == 0
            && bitsize == GET_MODE_BITSIZE (mode1))
          {
            enum machine_mode opmode = GET_MODE (op0);
 
-           gcc_assert (opmode != BLKmode);
-
            if (opmode == VOIDmode)
              opmode = mode1;
@@ -2575,8 +2584,9 @@
            if (bitpos >= GET_MODE_BITSIZE (opmode))
              return NULL;
 
-           return simplify_gen_subreg (mode, op0, opmode,
-                                       bitpos / BITS_PER_UNIT);
+           if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
+             return simplify_gen_subreg (mode, op0, opmode,
+                                         bitpos / BITS_PER_UNIT);
          }
 
        return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
@@ -2610,6 +2620,22 @@
      return gen_rtx_FIX (mode, op0);
 
    case POINTER_PLUS_EXPR:
+      /* For the rare target where pointers are not the same size as
+         size_t, we need to check for mis-matched modes and correct
+         the addend.  */
+      if (op0 && op1
+          && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
+          && GET_MODE (op0) != GET_MODE (op1))
+       {
+         if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
+           op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
+         else
+           /* We always sign-extend, regardless of the signedness of
+              the operand, because the operand is always unsigned
+              here even if the original C expression is signed.  */
+           op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
+       }
+      /* Fall through.  */
    case PLUS_EXPR:
      return gen_rtx_PLUS (mode, op0, op1);
 
@@ -2926,20 +2952,106 @@ expand_debug_expr (tree exp)
 
    case SSA_NAME:
      {
-       int part = var_to_partition (SA.map, exp);
+       gimple g = get_gimple_for_ssa_name (exp);
+       if (g)
+         {
+           op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
+           if (!op0)
+             return NULL;
+         }
+       else
+         {
+           int part = var_to_partition (SA.map, exp);
 
-       if (part == NO_PARTITION)
-         return NULL;
+           if (part == NO_PARTITION)
+             return NULL;
 
-       gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
+           gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
 
-       op0 = SA.partition_to_pseudo[part];
+           op0 = SA.partition_to_pseudo[part];
+         }
        goto adjust_mode;
      }
 
    case ERROR_MARK:
      return NULL;
 
+      /* Vector stuff.  For most of the codes we don't have rtl codes.  */
+    case REALIGN_LOAD_EXPR:
+    case REDUC_MAX_EXPR:
+    case REDUC_MIN_EXPR:
+    case REDUC_PLUS_EXPR:
+    case VEC_COND_EXPR:
+    case VEC_EXTRACT_EVEN_EXPR:
+    case VEC_EXTRACT_ODD_EXPR:
+    case VEC_INTERLEAVE_HIGH_EXPR:
+    case VEC_INTERLEAVE_LOW_EXPR:
+    case VEC_LSHIFT_EXPR:
+    case VEC_PACK_FIX_TRUNC_EXPR:
+    case VEC_PACK_SAT_EXPR:
+    case VEC_PACK_TRUNC_EXPR:
+    case VEC_RSHIFT_EXPR:
+    case VEC_UNPACK_FLOAT_HI_EXPR:
+    case VEC_UNPACK_FLOAT_LO_EXPR:
+    case VEC_UNPACK_HI_EXPR:
+    case VEC_UNPACK_LO_EXPR:
+    case VEC_WIDEN_MULT_HI_EXPR:
+    case VEC_WIDEN_MULT_LO_EXPR:
+      return NULL;
+
+      /* Misc codes.  */
+    case ADDR_SPACE_CONVERT_EXPR:
+    case FIXED_CONVERT_EXPR:
+    case OBJ_TYPE_REF:
+    case WITH_SIZE_EXPR:
+      return NULL;
+
+    case DOT_PROD_EXPR:
+      if (SCALAR_INT_MODE_P (GET_MODE (op0))
+          && SCALAR_INT_MODE_P (mode))
+       {
+         if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+           op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+         else
+           op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+         if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
+           op1 = gen_rtx_ZERO_EXTEND (mode, op1);
+         else
+           op1 = gen_rtx_SIGN_EXTEND (mode, op1);
+         op0 = gen_rtx_MULT (mode, op0, op1);
+         return gen_rtx_PLUS (mode, op0, op2);
+       }
+      return NULL;
+
+    case WIDEN_MULT_EXPR:
+      if (SCALAR_INT_MODE_P (GET_MODE (op0))
+          && SCALAR_INT_MODE_P (mode))
+       {
+         enum machine_mode inner_mode = GET_MODE (op0);
+         if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+           op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
+         else
+           op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
+         if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
+           op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
+         else
+           op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
+         return gen_rtx_MULT (mode, op0, op1);
+       }
+      return NULL;
+
+    case WIDEN_SUM_EXPR:
+      if (SCALAR_INT_MODE_P (GET_MODE (op0))
+          && SCALAR_INT_MODE_P (mode))
+       {
+         if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+           op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+         else
+           op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+         return gen_rtx_PLUS (mode, op0, op1);
+       }
+      return NULL;
+
    default:
    flag_unsupported:
 #ifdef ENABLE_CHECKING
@@ -3090,6 +3202,105 @@ expand_gimple_basic_block (basic_block bb)
          basic_block new_bb;
 
          stmt = gsi_stmt (gsi);
+
+         /* If this statement is a non-debug one, and we generate debug
+            insns, then this one might be the last real use of a TERed
+            SSA_NAME, but where there are still some debug uses further
+            down.  Expanding the current SSA name in such further debug
+            uses by their RHS might lead to wrong debug info, as coalescing
+            might make the operands of such RHS be placed into the same
+            pseudo as something else.  Like so:
+              a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
+              use(a_1);
+              a_2 = ...
+              #DEBUG ... => a_1
+            As a_0 and a_2 don't overlap in lifetime, assume they are
+            coalesced.  If we were now to expand a_1 by its RHS (a_0 + 1)
+            in the debug use, the write to a_2 would actually have
+            clobbered the place which formerly held a_0.
+
+            So, instead of that, we recognize the situation, and generate
+            debug temporaries at the last real use of TERed SSA names:
+              a_1 = a_0 + 1;
+              #DEBUG #D1 => a_1
+              use(a_1);
+              a_2 = ...
+              #DEBUG ... => #D1  */
+         if (MAY_HAVE_DEBUG_INSNS
+             && SA.values
+             && !is_gimple_debug (stmt))
+           {
+             ssa_op_iter iter;
+             tree op;
+             gimple def;
+
+             location_t sloc = get_curr_insn_source_location ();
+             tree sblock = get_curr_insn_block ();
+
+             /* Look for SSA names that have their last use here (TERed
+                names always have only one real use).  */
+             FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
+               if ((def = get_gimple_for_ssa_name (op)))
+                 {
+                   imm_use_iterator imm_iter;
+                   use_operand_p use_p;
+                   bool have_debug_uses = false;
+
+                   FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
+                     {
+                       if (gimple_debug_bind_p (USE_STMT (use_p)))
+                         {
+                           have_debug_uses = true;
+                           break;
+                         }
+                     }
+
+                   if (have_debug_uses)
+                     {
+                       /* OP is a TERed SSA name, with DEF its defining
+                          statement, and where OP is used in further
+                          debug instructions.  Generate a debug temporary,
+                          and replace all uses of OP in debug insns with
+                          that temporary.  */
+                       gimple debugstmt;
+                       tree value = gimple_assign_rhs_to_tree (def);
+                       tree vexpr = make_node (DEBUG_EXPR_DECL);
+                       rtx val;
+                       enum machine_mode mode;
+
+                       set_curr_insn_source_location (gimple_location (def));
+                       set_curr_insn_block (gimple_block (def));
+
+                       DECL_ARTIFICIAL (vexpr) = 1;
+                       TREE_TYPE (vexpr) = TREE_TYPE (value);
+                       if (DECL_P (value))
+                         mode = DECL_MODE (value);
+                       else
+                         mode = TYPE_MODE (TREE_TYPE (value));
+                       DECL_MODE (vexpr) = mode;
+
+                       val = gen_rtx_VAR_LOCATION
+                               (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
+
+                       val = emit_debug_insn (val);
+
+                       FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
+                         {
+                           if (!gimple_debug_bind_p (debugstmt))
+                             continue;
+
+                           FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+                             SET_USE (use_p, vexpr);
+
+                           update_stmt (debugstmt);
+                         }
+                     }
+                 }
+             set_curr_insn_source_location (sloc);
+             set_curr_insn_block (sblock);
+           }
+
          currently_expanding_gimple_stmt = stmt;
 
          /* Expand this statement, then evaluate the resulting RTL and
@@ -3142,6 +3353,13 @@ expand_gimple_basic_block (basic_block bb)
                      INSN_VAR_LOCATION_LOC (val) = (rtx)value;
                    }
 
+                 /* In order not to generate too many debug temporaries,
+                    we delink all uses of debug statements we already expanded.
+                    Therefore debug statements between definition and real
+                    use of TERed SSA names will continue to use the SSA name,
+                    and not be replaced with debug temps.  */
+                 delink_stmt_imm_use (stmt);
+
                  gsi = nsi;
                  gsi_next (&nsi);
                  if (gsi_end_p (nsi))
@@ -3425,7 +3643,8 @@ discover_nonconstant_array_refs (void)
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
-         walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
+         if (!is_gimple_debug (stmt))
+           walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
        }
    }
 
@@ -3566,10 +3785,12 @@ gimple_expand_cfg (void)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
-                "not protecting local variables: variable length buffer");
+                "stack protector not protecting local variables: "
+                "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
-                "not protecting function: no buffer at least %d bytes long",
+                "stack protector not protecting function: "
+                "all local arrays are less than %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }
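---

A few standalone sketches follow; they illustrate the semantics of the hunks above and are not part of the patch. All helper names in them are made up for the examples.

The POINTER_PLUS_EXPR hunk truncates the addend when its mode is wider than the pointer's mode and sign-extends it when it is narrower, always sign-extending regardless of the addend's C-level signedness. A minimal C sketch of that rule, using fixed-width integers to stand in for the RTL modes:

#include <stdint.h>
#include <stdio.h>

/* Stand-in for TRUNCATE: the addend is wider than the pointer mode.  */
static uint32_t
addend_truncate (uint64_t op1)
{
  return (uint32_t) op1;
}

/* Stand-in for SIGN_EXTEND: the addend is narrower than the pointer
   mode.  Sign-extension is applied regardless of the addend's C-level
   signedness, matching the comment in the hunk.  */
static uint64_t
addend_sign_extend (uint32_t op1)
{
  return (uint64_t) (int64_t) (int32_t) op1;
}

int
main (void)
{
  /* 2^32 + 5 truncates to 5 in a 32-bit pointer mode.  */
  printf ("%u\n", (unsigned) addend_truncate (0x100000005ull));
  /* 0xfffffffc reads back as -4 once sign-extended to 64 bits.  */
  printf ("%lld\n", (long long) addend_sign_extend (0xfffffffcu));
  return 0;
}

The sign-extension choice matters precisely for negative offsets: an offset that was computed as the unsigned 32-bit value 0xfffffffc must behave as -4 when the pointer mode is 64 bits wide.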
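The WIDEN_MULT_EXPR hunk extends each operand from the inner mode to the result mode according to that operand's own signedness, then multiplies in the wide mode; DOT_PROD_EXPR and WIDEN_SUM_EXPR follow the same extend-then-operate pattern. The same arithmetic in plain C (hypothetical helper, not GCC code), for a signed 32-to-64-bit widening multiply:

#include <stdint.h>
#include <stdio.h>

/* Widening multiply as in the WIDEN_MULT_EXPR hunk: both operands are
   extended to the result mode first (SIGN_EXTEND here, since both are
   signed), and the multiply itself is done in the wide mode.  */
static int64_t
widen_mult_s32 (int32_t a, int32_t b)
{
  return (int64_t) a * (int64_t) b;  /* product always fits in 64 bits */
}

int
main (void)
{
  /* (2^31 - 1)^2 overflows int32_t but not the widened result.  */
  printf ("%lld\n", (long long) widen_mult_s32 (INT32_MAX, INT32_MAX));
  return 0;
}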
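The long comment added to expand_gimple_basic_block explains why a TERed SSA name must not be re-expanded from its RHS inside later debug binds once coalescing may have reused an operand's location. The toy C model below (purely illustrative; the slot array plays the role of a coalesced pseudo shared by a_0 and a_2) shows the stale value a naive re-expansion would compute versus the value captured by a debug temporary at the last real use:

#include <stdio.h>

int
main (void)
{
  /* a_0 and a_2 don't overlap in lifetime, so assume the allocator
     coalesces them into the same slot.  */
  int slot[1];

  slot[0] = 41;                /* a_0 = 41                             */
  int a_1 = slot[0] + 1;       /* a_1 = a_0 + 1 (TERed, never stored)  */
  int debug_temp = a_1;        /* #DEBUG #D1 => a_1, at last real use  */
  printf ("use(a_1) = %d\n", a_1);
  slot[0] = 1000;              /* a_2 = ... clobbers a_0's old home    */

  /* Re-expanding a_1 as "a_0 + 1" now reads the clobbered slot ...    */
  printf ("stale re-expansion: %d\n", slot[0] + 1);  /* 1001, wrong    */
  /* ... while the debug temporary still holds the right value.        */
  printf ("debug temp:         %d\n", debug_temp);   /* 42, correct    */
  return 0;
}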