{
t = gimple_assign_rhs1 (stmt);
/* Avoid modifying this tree in place below. */
- if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
- && gimple_location (stmt) != EXPR_LOCATION (t))
+ if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
+ && gimple_location (stmt) != EXPR_LOCATION (t))
+ || (gimple_block (stmt)
+ && currently_expanding_to_rtl
+ && EXPR_P (t)
+ && gimple_block (stmt) != TREE_BLOCK (t)))
t = copy_node (t);
}
else
if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
SET_EXPR_LOCATION (t, gimple_location (stmt));
+ if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
+ TREE_BLOCK (t) = gimple_block (stmt);
return t;
}
for (j = i; j != EOC; j = stack_vars[j].next)
{
tree decl = stack_vars[j].decl;
- unsigned int uid = DECL_UID (decl);
+ unsigned int uid = DECL_PT_UID (decl);
/* We should never end up partitioning SSA names (though they
may end up on the stack). Neither should we allocate stack
space to something that is unused and thus unreferenced. */
gcc_assert (DECL_P (decl)
- && referenced_var_lookup (uid));
+ && referenced_var_lookup (DECL_UID (decl)));
bitmap_set_bit (part, uid);
*((bitmap *) pointer_map_insert (decls_to_partitions,
(void *)(size_t) uid)) = part;
/* Make the SSA name point to all partition members. */
pi = get_ptr_info (name);
- pt_solution_set (&pi->pt, part);
+ pt_solution_set (&pi->pt, part, false, false);
}
/* Make all points-to sets that contain one member of a partition
add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
decls_to_partitions, visited, temp);
- add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
- decls_to_partitions, visited, temp);
pointer_set_destroy (visited);
pointer_map_destroy (decls_to_partitions);
expand_used_vars (void)
{
tree t, next, outer_block = DECL_INITIAL (current_function_decl);
+ tree maybe_local_decls = NULL_TREE;
unsigned i;
/* Compute the phase of the stack frame for this function. */
if (is_gimple_reg (var))
{
TREE_USED (var) = 0;
- ggc_free (t);
- continue;
+ goto next;
}
/* We didn't set a block for static or extern because it's hard
to tell the difference between a global variable (re)declared
TREE_USED (var) = 1;
if (expand_now)
+ expand_one_var (var, true, true);
+
+ next:
+ if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
{
- expand_one_var (var, true, true);
- if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
- {
- rtx rtl = DECL_RTL_IF_SET (var);
+ rtx rtl = DECL_RTL_IF_SET (var);
- /* Keep artificial non-ignored vars in cfun->local_decls
- chain until instantiate_decls. */
- if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
- {
- TREE_CHAIN (t) = cfun->local_decls;
- cfun->local_decls = t;
- continue;
- }
+ /* Keep artificial non-ignored vars in cfun->local_decls
+ chain until instantiate_decls. */
+ if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
+ {
+ TREE_CHAIN (t) = cfun->local_decls;
+ cfun->local_decls = t;
+ continue;
+ }
+ else if (rtl == NULL_RTX)
+ {
+ /* If rtl isn't set yet, which can happen e.g. with
+ -fstack-protector, retry before returning from this
+ function. */
+ TREE_CHAIN (t) = maybe_local_decls;
+ maybe_local_decls = t;
+ continue;
}
}
fini_vars_expansion ();
}
+ /* If there were any artificial non-ignored vars without rtl
+ found earlier, see if deferred stack allocation hasn't assigned
+ rtl to them. */
+ for (t = maybe_local_decls; t; t = next)
+ {
+ tree var = TREE_VALUE (t);
+ rtx rtl = DECL_RTL_IF_SET (var);
+
+ next = TREE_CHAIN (t);
+
+ /* Keep artificial non-ignored vars in cfun->local_decls
+ chain until instantiate_decls. */
+ if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
+ {
+ TREE_CHAIN (t) = cfun->local_decls;
+ cfun->local_decls = t;
+ continue;
+ }
+
+ ggc_free (t);
+ }
+
/* If the target requires that FRAME_OFFSET be aligned, do it. */
if (STACK_ALIGNMENT_NEEDED)
{
switch (TREE_CODE (exp))
{
case COND_EXPR:
+ case DOT_PROD_EXPR:
goto ternary;
case TRUTH_ANDIF_EXPR:
|| mode == VOIDmode)
return NULL;
- op0 = DECL_RTL (exp);
- SET_DECL_RTL (exp, NULL);
+ op0 = make_decl_rtl_for_debug (exp);
if (!MEM_P (op0)
|| GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
|| SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
else
op0 = copy_rtx (op0);
- if (GET_MODE (op0) == BLKmode)
+ if (GET_MODE (op0) == BLKmode
+ 	      /* If op0 is not BLKmode but mode is BLKmode, adjust_mode
+ 		 below would ICE.  While it is likely a FE bug,
+ 		 try to be robust here.  See PR43166.  */
+ || mode == BLKmode
+ || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
{
gcc_assert (MEM_P (op0));
op0 = adjust_address_nv (op0, mode, 0);
{
enum machine_mode addrmode, offmode;
- gcc_assert (MEM_P (op0));
+ if (!MEM_P (op0))
+ return NULL;
op0 = XEXP (op0, 0);
addrmode = GET_MODE (op0);
if (bitpos >= GET_MODE_BITSIZE (opmode))
return NULL;
- return simplify_gen_subreg (mode, op0, opmode,
- bitpos / BITS_PER_UNIT);
+ if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
+ return simplify_gen_subreg (mode, op0, opmode,
+ bitpos / BITS_PER_UNIT);
}
return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
return gen_rtx_FIX (mode, op0);
case POINTER_PLUS_EXPR:
+ /* For the rare target where pointers are not the same size as
+ size_t, we need to check for mis-matched modes and correct
+ the addend. */
+ if (op0 && op1
+ && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
+ && GET_MODE (op0) != GET_MODE (op1))
+ {
+ if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
+ op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
+ else
+ /* We always sign-extend, regardless of the signedness of
+ the operand, because the operand is always unsigned
+ here even if the original C expression is signed. */
+ op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
+ }
+ /* Fall through. */
case PLUS_EXPR:
return gen_rtx_PLUS (mode, op0, op1);
case ERROR_MARK:
return NULL;
+ /* Vector stuff. For most of the codes we don't have rtl codes. */
+ case REALIGN_LOAD_EXPR:
+ case REDUC_MAX_EXPR:
+ case REDUC_MIN_EXPR:
+ case REDUC_PLUS_EXPR:
+ case VEC_COND_EXPR:
+ case VEC_EXTRACT_EVEN_EXPR:
+ case VEC_EXTRACT_ODD_EXPR:
+ case VEC_INTERLEAVE_HIGH_EXPR:
+ case VEC_INTERLEAVE_LOW_EXPR:
+ case VEC_LSHIFT_EXPR:
+ case VEC_PACK_FIX_TRUNC_EXPR:
+ case VEC_PACK_SAT_EXPR:
+ case VEC_PACK_TRUNC_EXPR:
+ case VEC_RSHIFT_EXPR:
+ case VEC_UNPACK_FLOAT_HI_EXPR:
+ case VEC_UNPACK_FLOAT_LO_EXPR:
+ case VEC_UNPACK_HI_EXPR:
+ case VEC_UNPACK_LO_EXPR:
+ case VEC_WIDEN_MULT_HI_EXPR:
+ case VEC_WIDEN_MULT_LO_EXPR:
+ return NULL;
+
+ /* Misc codes. */
+ case ADDR_SPACE_CONVERT_EXPR:
+ case FIXED_CONVERT_EXPR:
+ case OBJ_TYPE_REF:
+ case WITH_SIZE_EXPR:
+ return NULL;
+
+ case DOT_PROD_EXPR:
+ if (SCALAR_INT_MODE_P (GET_MODE (op0))
+ && SCALAR_INT_MODE_P (mode))
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ else
+ op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
+ op1 = gen_rtx_ZERO_EXTEND (mode, op1);
+ else
+ op1 = gen_rtx_SIGN_EXTEND (mode, op1);
+ op0 = gen_rtx_MULT (mode, op0, op1);
+ return gen_rtx_PLUS (mode, op0, op2);
+ }
+ return NULL;
+
+ case WIDEN_MULT_EXPR:
+ if (SCALAR_INT_MODE_P (GET_MODE (op0))
+ && SCALAR_INT_MODE_P (mode))
+ {
+ enum machine_mode inner_mode = GET_MODE (op0);
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
+ op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
+ else
+ op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
+ return gen_rtx_MULT (mode, op0, op1);
+ }
+ return NULL;
+
+ case WIDEN_SUM_EXPR:
+ if (SCALAR_INT_MODE_P (GET_MODE (op0))
+ && SCALAR_INT_MODE_P (mode))
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ else
+ op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+ return gen_rtx_PLUS (mode, op0, op1);
+ }
+ return NULL;
+
default:
flag_unsupported:
#ifdef ENABLE_CHECKING
for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
{
gimple stmt = gsi_stmt (gsi);
- walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
+ if (!is_gimple_debug (stmt))
+ walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
}
}
{
if (cfun->calls_alloca)
warning (OPT_Wstack_protector,
- "not protecting local variables: variable length buffer");
+ "stack protector not protecting local variables: "
+ "variable length buffer");
if (has_short_buffer && !crtl->stack_protect_guard)
warning (OPT_Wstack_protector,
- "not protecting function: no buffer at least %d bytes long",
+ "stack protector not protecting function: "
+ "all local arrays are less than %d bytes long",
(int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
}