{
t = gimple_assign_rhs1 (stmt);
/* Avoid modifying this tree in place below. */
- if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
- && gimple_location (stmt) != EXPR_LOCATION (t))
+ if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
+ && gimple_location (stmt) != EXPR_LOCATION (t))
+ || (gimple_block (stmt)
+ && currently_expanding_to_rtl
+ && EXPR_P (t)
+ && gimple_block (stmt) != TREE_BLOCK (t)))
t = copy_node (t);
}
else
if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
SET_EXPR_LOCATION (t, gimple_location (stmt));
+ if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
+ TREE_BLOCK (t) = gimple_block (stmt);
return t;
}
expand_used_vars (void)
{
tree t, next, outer_block = DECL_INITIAL (current_function_decl);
+ tree maybe_local_decls = NULL_TREE;
unsigned i;
/* Compute the phase of the stack frame for this function. */
if (is_gimple_reg (var))
{
TREE_USED (var) = 0;
- ggc_free (t);
- continue;
+ goto next;
}
/* We didn't set a block for static or extern because it's hard
to tell the difference between a global variable (re)declared
TREE_USED (var) = 1;
if (expand_now)
+ expand_one_var (var, true, true);
+
+ next:
+ if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
{
- expand_one_var (var, true, true);
- if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
- {
- rtx rtl = DECL_RTL_IF_SET (var);
+ rtx rtl = DECL_RTL_IF_SET (var);
- /* Keep artificial non-ignored vars in cfun->local_decls
- chain until instantiate_decls. */
- if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
- {
- TREE_CHAIN (t) = cfun->local_decls;
- cfun->local_decls = t;
- continue;
- }
+ /* Keep artificial non-ignored vars in cfun->local_decls
+ chain until instantiate_decls. */
+ if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
+ {
+ TREE_CHAIN (t) = cfun->local_decls;
+ cfun->local_decls = t;
+ continue;
+ }
+ else if (rtl == NULL_RTX)
+ {
+ /* If rtl isn't set yet, which can happen e.g. with
+ -fstack-protector, retry before returning from this
+ function. */
+ TREE_CHAIN (t) = maybe_local_decls;
+ maybe_local_decls = t;
+ continue;
}
}
fini_vars_expansion ();
}
+ /* If there were any artificial non-ignored vars without rtl
+ found earlier, see if deferred stack allocation hasn't assigned
+ rtl to them. */
+ for (t = maybe_local_decls; t; t = next)
+ {
+ tree var = TREE_VALUE (t);
+ rtx rtl = DECL_RTL_IF_SET (var);
+
+ next = TREE_CHAIN (t);
+
+ /* Keep artificial non-ignored vars in cfun->local_decls
+ chain until instantiate_decls. */
+ if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
+ {
+ TREE_CHAIN (t) = cfun->local_decls;
+ cfun->local_decls = t;
+ continue;
+ }
+
+ ggc_free (t);
+ }
+
/* If the target requires that FRAME_OFFSET be aligned, do it. */
if (STACK_ALIGNMENT_NEEDED)
{
switch (TREE_CODE (exp))
{
case COND_EXPR:
+ case DOT_PROD_EXPR:
goto ternary;
case TRUTH_ANDIF_EXPR:
|| mode == VOIDmode)
return NULL;
- op0 = DECL_RTL (exp);
- SET_DECL_RTL (exp, NULL);
+ op0 = make_decl_rtl_for_debug (exp);
if (!MEM_P (op0)
|| GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
|| SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
else
op0 = copy_rtx (op0);
- if (GET_MODE (op0) == BLKmode)
+ if (GET_MODE (op0) == BLKmode
+		 /* If op0 is not BLKmode but mode is BLKmode, adjust_mode
+		    below would ICE.  While it is likely a FE bug,
+		    try to be robust here.  See PR43166.  */
+ || mode == BLKmode
+ || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
{
gcc_assert (MEM_P (op0));
op0 = adjust_address_nv (op0, mode, 0);
if (inner_mode == VOIDmode)
{
- inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
+ if (TREE_CODE (exp) == SSA_NAME)
+ inner_mode = TYPE_MODE (TREE_TYPE (exp));
+ else
+ inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
if (mode == inner_mode)
return op0;
}
}
else if (FLOAT_MODE_P (mode))
{
+ gcc_assert (TREE_CODE (exp) != SSA_NAME);
if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
else
{
enum machine_mode addrmode, offmode;
- gcc_assert (MEM_P (op0));
+ if (!MEM_P (op0))
+ return NULL;
op0 = XEXP (op0, 0);
addrmode = GET_MODE (op0);
if (bitpos >= GET_MODE_BITSIZE (opmode))
return NULL;
- return simplify_gen_subreg (mode, op0, opmode,
- bitpos / BITS_PER_UNIT);
+ if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
+ return simplify_gen_subreg (mode, op0, opmode,
+ bitpos / BITS_PER_UNIT);
}
return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
case SSA_NAME:
{
- int part = var_to_partition (SA.map, exp);
+ gimple g = get_gimple_for_ssa_name (exp);
+ if (g)
+ {
+ op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
+ if (!op0)
+ return NULL;
+ }
+ else
+ {
+ int part = var_to_partition (SA.map, exp);
- if (part == NO_PARTITION)
- return NULL;
+ if (part == NO_PARTITION)
+ return NULL;
- gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
+ gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);
- op0 = SA.partition_to_pseudo[part];
+ op0 = SA.partition_to_pseudo[part];
+ }
goto adjust_mode;
}
case ERROR_MARK:
return NULL;
+ /* Vector stuff. For most of the codes we don't have rtl codes. */
+ case REALIGN_LOAD_EXPR:
+ case REDUC_MAX_EXPR:
+ case REDUC_MIN_EXPR:
+ case REDUC_PLUS_EXPR:
+ case VEC_COND_EXPR:
+ case VEC_EXTRACT_EVEN_EXPR:
+ case VEC_EXTRACT_ODD_EXPR:
+ case VEC_INTERLEAVE_HIGH_EXPR:
+ case VEC_INTERLEAVE_LOW_EXPR:
+ case VEC_LSHIFT_EXPR:
+ case VEC_PACK_FIX_TRUNC_EXPR:
+ case VEC_PACK_SAT_EXPR:
+ case VEC_PACK_TRUNC_EXPR:
+ case VEC_RSHIFT_EXPR:
+ case VEC_UNPACK_FLOAT_HI_EXPR:
+ case VEC_UNPACK_FLOAT_LO_EXPR:
+ case VEC_UNPACK_HI_EXPR:
+ case VEC_UNPACK_LO_EXPR:
+ case VEC_WIDEN_MULT_HI_EXPR:
+ case VEC_WIDEN_MULT_LO_EXPR:
+ return NULL;
+
+ /* Misc codes. */
+ case ADDR_SPACE_CONVERT_EXPR:
+ case FIXED_CONVERT_EXPR:
+ case OBJ_TYPE_REF:
+ case WITH_SIZE_EXPR:
+ return NULL;
+
+ case DOT_PROD_EXPR:
+ if (SCALAR_INT_MODE_P (GET_MODE (op0))
+ && SCALAR_INT_MODE_P (mode))
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ else
+ op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
+ op1 = gen_rtx_ZERO_EXTEND (mode, op1);
+ else
+ op1 = gen_rtx_SIGN_EXTEND (mode, op1);
+ op0 = gen_rtx_MULT (mode, op0, op1);
+ return gen_rtx_PLUS (mode, op0, op2);
+ }
+ return NULL;
+
+ case WIDEN_MULT_EXPR:
+ if (SCALAR_INT_MODE_P (GET_MODE (op0))
+ && SCALAR_INT_MODE_P (mode))
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ else
+ op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
+ op1 = gen_rtx_ZERO_EXTEND (mode, op1);
+ else
+ op1 = gen_rtx_SIGN_EXTEND (mode, op1);
+ return gen_rtx_MULT (mode, op0, op1);
+ }
+ return NULL;
+
+ case WIDEN_SUM_EXPR:
+ if (SCALAR_INT_MODE_P (GET_MODE (op0))
+ && SCALAR_INT_MODE_P (mode))
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ else
+ op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+ return gen_rtx_PLUS (mode, op0, op1);
+ }
+ return NULL;
+
default:
flag_unsupported:
#ifdef ENABLE_CHECKING
basic_block new_bb;
stmt = gsi_stmt (gsi);
+
+ /* If this statement is a non-debug one, and we generate debug
+ insns, then this one might be the last real use of a TERed
+ SSA_NAME, but where there are still some debug uses further
+ down. Expanding the current SSA name in such further debug
+ uses by their RHS might lead to wrong debug info, as coalescing
+ might make the operands of such RHS be placed into the same
+ pseudo as something else. Like so:
+ a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
+ use(a_1);
+ a_2 = ...
+ #DEBUG ... => a_1
+ As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
+	 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
+ the write to a_2 would actually have clobbered the place which
+ formerly held a_0.
+
+ So, instead of that, we recognize the situation, and generate
+ debug temporaries at the last real use of TERed SSA names:
+ a_1 = a_0 + 1;
+ #DEBUG #D1 => a_1
+ use(a_1);
+ a_2 = ...
+ #DEBUG ... => #D1
+ */
+ if (MAY_HAVE_DEBUG_INSNS
+ && SA.values
+ && !is_gimple_debug (stmt))
+ {
+ ssa_op_iter iter;
+ tree op;
+ gimple def;
+
+ location_t sloc = get_curr_insn_source_location ();
+ tree sblock = get_curr_insn_block ();
+
+ /* Look for SSA names that have their last use here (TERed
+ names always have only one real use). */
+ FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
+ if ((def = get_gimple_for_ssa_name (op)))
+ {
+ imm_use_iterator imm_iter;
+ use_operand_p use_p;
+ bool have_debug_uses = false;
+
+ FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
+ {
+ if (gimple_debug_bind_p (USE_STMT (use_p)))
+ {
+ have_debug_uses = true;
+ break;
+ }
+ }
+
+ if (have_debug_uses)
+ {
+		      /* OP is a TERed SSA name, with DEF its defining
+ statement, and where OP is used in further debug
+ instructions. Generate a debug temporary, and
+ replace all uses of OP in debug insns with that
+ temporary. */
+ gimple debugstmt;
+ tree value = gimple_assign_rhs_to_tree (def);
+ tree vexpr = make_node (DEBUG_EXPR_DECL);
+ rtx val;
+ enum machine_mode mode;
+
+ set_curr_insn_source_location (gimple_location (def));
+ set_curr_insn_block (gimple_block (def));
+
+ DECL_ARTIFICIAL (vexpr) = 1;
+ TREE_TYPE (vexpr) = TREE_TYPE (value);
+ if (DECL_P (value))
+ mode = DECL_MODE (value);
+ else
+ mode = TYPE_MODE (TREE_TYPE (value));
+ DECL_MODE (vexpr) = mode;
+
+ val = gen_rtx_VAR_LOCATION
+ (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);
+
+ val = emit_debug_insn (val);
+
+ FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
+ {
+ if (!gimple_debug_bind_p (debugstmt))
+ continue;
+
+ FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
+ SET_USE (use_p, vexpr);
+
+ update_stmt (debugstmt);
+ }
+ }
+ }
+ set_curr_insn_source_location (sloc);
+ set_curr_insn_block (sblock);
+ }
+
currently_expanding_gimple_stmt = stmt;
/* Expand this statement, then evaluate the resulting RTL and
INSN_VAR_LOCATION_LOC (val) = (rtx)value;
}
+ /* In order not to generate too many debug temporaries,
+ we delink all uses of debug statements we already expanded.
+ Therefore debug statements between definition and real
+ use of TERed SSA names will continue to use the SSA name,
+ and not be replaced with debug temps. */
+ delink_stmt_imm_use (stmt);
+
gsi = nsi;
gsi_next (&nsi);
if (gsi_end_p (nsi))
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
+ if (!is_gimple_debug (stmt))
+ walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
}
}