of communicating the profile info to the builtin expanders. */
gimple currently_expanding_gimple_stmt;
+static rtx expand_debug_expr (tree);
+
/* Return an expression tree corresponding to the RHS of GIMPLE
statement STMT. */
/* The Variable. */
tree decl;
- /* The offset of the variable. During partitioning, this is the
- offset relative to the partition. After partitioning, this
- is relative to the stack frame. */
- HOST_WIDE_INT offset;
-
/* Initially, the size of the variable. Later, the size of the partition,
if this variable becomes its partition's representative. */
HOST_WIDE_INT size;
v = &stack_vars[stack_vars_num];
v->decl = decl;
- v->offset = 0;
v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
/* Ensure that all variables have size, so that &a != &b for any two
variables that are simultaneously live. */
if (v->size == 0)
v->size = 1;
v->alignb = align_local_variable (SSAVAR (decl));
+ /* An alignment of zero can mightily confuse us later. */
+ gcc_assert (v->alignb != 0);
/* All variables are initially in their own partition. */
v->representative = stack_vars_num;
to elements will conflict. In case of unions we have
to be careful as type based aliasing rules may say
access to the same memory does not conflict. So play
- safe and add a conflict in this case. */
- || contains_union)
+ safe and add a conflict in this case when
+ -fstrict-aliasing is used. */
+ || (contains_union && flag_strict_aliasing))
add_stack_var_conflict (i, j);
}
}
return (int)largeb - (int)largea;
/* Secondary compare on size, decreasing */
- if (sizea < sizeb)
- return -1;
if (sizea > sizeb)
+ return -1;
+ if (sizea < sizeb)
return 1;
/* Tertiary compare on true alignment, decreasing. */
/* Make the SSA name point to all partition members. */
pi = get_ptr_info (name);
- pt_solution_set (&pi->pt, part, false, false);
+ pt_solution_set (&pi->pt, part, false);
}
/* Make all points-to sets that contain one member of a partition
/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
partitioning algorithm. Partitions A and B are known to be non-conflicting.
- Merge them into a single partition A.
-
- At the same time, add OFFSET to all variables in partition B. At the end
- of the partitioning process we've have a nice block easy to lay out within
- the stack frame. */
+ Merge them into a single partition A. */
static void
-union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
+union_stack_vars (size_t a, size_t b)
{
- size_t i, last;
struct stack_var *vb = &stack_vars[b];
bitmap_iterator bi;
unsigned u;
- /* Update each element of partition B with the given offset,
- and merge them into partition A. */
- for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
- {
- stack_vars[i].offset += offset;
- stack_vars[i].representative = a;
- }
- stack_vars[last].next = stack_vars[a].next;
+ gcc_assert (stack_vars[b].next == EOC);
+ /* Add B to A's partition. */
+ stack_vars[b].next = stack_vars[a].next;
+ stack_vars[b].representative = a;
stack_vars[a].next = b;
/* Update the required alignment of partition A to account for B. */
partitions constrained by the interference graph. The overall
algorithm used is as follows:
- Sort the objects by size.
+ Sort the objects by size in descending order.
For each object A {
S = size(A)
O = 0
loop {
Look for the largest non-conflicting object B with size <= S.
UNION (A, B)
- offset(B) = O
- O += size(B)
- S -= size(B)
}
}
*/
for (si = 0; si < n; ++si)
{
size_t i = stack_vars_sorted[si];
- HOST_WIDE_INT isize = stack_vars[i].size;
unsigned int ialign = stack_vars[i].alignb;
- HOST_WIDE_INT offset = 0;
- for (sj = si; sj-- > 0; )
+ /* Ignore objects that aren't partition representatives. If we
+ see a var that is not a partition representative, it must
+ have been merged earlier. */
+ if (stack_vars[i].representative != i)
+ continue;
+
+ for (sj = si + 1; sj < n; ++sj)
{
size_t j = stack_vars_sorted[sj];
- HOST_WIDE_INT jsize = stack_vars[j].size;
unsigned int jalign = stack_vars[j].alignb;
/* Ignore objects that aren't partition representatives. */
if (stack_vars[j].representative != j)
continue;
- /* Ignore objects too large for the remaining space. */
- if (isize < jsize)
- continue;
-
/* Ignore conflicting objects. */
if (stack_var_conflict_p (i, j))
continue;
!= (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
continue;
- /* Refine the remaining space check to include alignment. */
- if (offset & (jalign - 1))
- {
- HOST_WIDE_INT toff = offset;
- toff += jalign - 1;
- toff &= -(HOST_WIDE_INT)jalign;
- if (isize - (toff - offset) < jsize)
- continue;
-
- isize -= toff - offset;
- offset = toff;
- }
-
/* UNION the objects. */
- union_stack_vars (i, j, offset);
-
- isize -= jsize;
- if (isize == 0)
- break;
+ union_stack_vars (i, j);
}
}
{
fputc ('\t', dump_file);
print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
- fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
- stack_vars[j].offset);
}
+ fputc ('\n', dump_file);
}
}
partition. */
for (j = i; j != EOC; j = stack_vars[j].next)
{
- gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
expand_one_stack_var_at (stack_vars[j].decl,
base, base_align,
- stack_vars[j].offset + offset);
+ offset);
}
}
mark_user_reg (x);
if (POINTER_TYPE_P (type))
- mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
+ mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
/* Expand all variables at this level. */
for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
- if (TREE_USED (t))
+ if (TREE_USED (t)
+ && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
+ || !DECL_NONSHAREABLE (t)))
expand_one_var (t, toplevel, true);
this_sv_num = stack_vars_num;
for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
/* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
+ if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
+ || !DECL_NONSHAREABLE (t))
TREE_USED (t) = 0;
for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
static void
expand_call_stmt (gimple stmt)
{
- tree exp, decl, lhs = gimple_call_lhs (stmt);
+ tree exp, decl, lhs;
bool builtin_p;
size_t i;
+ if (gimple_call_internal_p (stmt))
+ {
+ expand_internal_call (stmt);
+ return;
+ }
+
exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
- CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
+ if (decl
+ && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
+ && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
+ || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
+ CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
+ else
+ CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
SET_EXPR_LOCATION (exp, gimple_location (stmt));
TREE_BLOCK (exp) = gimple_block (stmt);
+ /* Ensure RTL is created for debug args. */
+ if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
+ {
+ VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
+ unsigned int ix;
+ tree dtemp;
+
+ if (debug_args)
+ for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
+ {
+ gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
+ expand_debug_expr (dtemp);
+ }
+ }
+
+ lhs = gimple_call_lhs (stmt);
if (lhs)
expand_assignment (lhs, exp, false);
else
if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
return x;
- if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode))
+ if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
x = simplify_gen_subreg (mode, x, xmode,
subreg_lowpart_offset
(mode, xmode));
return x;
}
-/* Return an RTX equivalent to the value of the tree expression
- EXP. */
+/* Return an RTX equivalent to the value of the parameter DECL. */
+
+static rtx
+expand_debug_parm_decl (tree decl)
+{
+ rtx incoming = DECL_INCOMING_RTL (decl);
+
+ if (incoming
+ && GET_MODE (incoming) != BLKmode
+ && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
+ || (MEM_P (incoming)
+ && REG_P (XEXP (incoming, 0))
+ && HARD_REGISTER_P (XEXP (incoming, 0)))))
+ {
+ rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
+
+#ifdef HAVE_window_save
+ /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
+ If the target machine has an explicit window save instruction, the
+ actual entry value is the corresponding OUTGOING_REGNO instead. */
+ if (REG_P (incoming)
+ && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
+ incoming
+ = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
+ OUTGOING_REGNO (REGNO (incoming)), 0);
+ else if (MEM_P (incoming))
+ {
+ rtx reg = XEXP (incoming, 0);
+ if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
+ {
+ reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
+ incoming = replace_equiv_address_nv (incoming, reg);
+ }
+ }
+#endif
+
+ ENTRY_VALUE_EXP (rtl) = incoming;
+ return rtl;
+ }
+
+ if (incoming
+ && GET_MODE (incoming) != BLKmode
+ && !TREE_ADDRESSABLE (decl)
+ && MEM_P (incoming)
+ && (XEXP (incoming, 0) == virtual_incoming_args_rtx
+ || (GET_CODE (XEXP (incoming, 0)) == PLUS
+ && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
+ && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
+ return incoming;
+
+ return NULL_RTX;
+}
+
+/* Return an RTX equivalent to the value of the tree expression EXP. */
static rtx
expand_debug_expr (tree exp)
{
rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ enum machine_mode inner_mode = VOIDmode;
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
addr_space_t as;
unary:
case tcc_unary:
+ inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
if (!op0)
return NULL_RTX;
|| !TREE_STATIC (exp)
|| !DECL_NAME (exp)
|| DECL_HARD_REGISTER (exp)
+ || DECL_IN_CONSTANT_POOL (exp)
|| mode == VOIDmode)
return NULL;
case NOP_EXPR:
case CONVERT_EXPR:
{
- enum machine_mode inner_mode = GET_MODE (op0);
+ inner_mode = GET_MODE (op0);
if (mode == inner_mode)
return op0;
op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
}
else if (CONSTANT_P (op0)
- || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
+ || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
op0 = simplify_gen_subreg (mode, op0, inner_mode,
subreg_lowpart_offset (mode,
inner_mode));
else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
: unsignedp)
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+ op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
return op0;
}
/* Don't use offset_address here, we don't need a
recognizable address, and we don't want to generate
code. */
- op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
+ op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
+ op0, op1));
}
if (MEM_P (op0))
}
case ABS_EXPR:
- return gen_rtx_ABS (mode, op0);
+ return simplify_gen_unary (ABS, mode, op0, mode);
case NEGATE_EXPR:
- return gen_rtx_NEG (mode, op0);
+ return simplify_gen_unary (NEG, mode, op0, mode);
case BIT_NOT_EXPR:
- return gen_rtx_NOT (mode, op0);
+ return simplify_gen_unary (NOT, mode, op0, mode);
case FLOAT_EXPR:
- if (unsignedp)
- return gen_rtx_UNSIGNED_FLOAT (mode, op0);
- else
- return gen_rtx_FLOAT (mode, op0);
+ return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? UNSIGNED_FLOAT : FLOAT, mode, op0,
+ inner_mode);
case FIX_TRUNC_EXPR:
- if (unsignedp)
- return gen_rtx_UNSIGNED_FIX (mode, op0);
- else
- return gen_rtx_FIX (mode, op0);
+ return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
+ inner_mode);
case POINTER_PLUS_EXPR:
/* For the rare target where pointers are not the same size as
&& GET_MODE (op0) != GET_MODE (op1))
{
if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
- op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
+ op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
+ GET_MODE (op1));
else
/* We always sign-extend, regardless of the signedness of
the operand, because the operand is always unsigned
here even if the original C expression is signed. */
- op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
+ op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
+ GET_MODE (op1));
}
/* Fall through. */
case PLUS_EXPR:
- return gen_rtx_PLUS (mode, op0, op1);
+ return simplify_gen_binary (PLUS, mode, op0, op1);
case MINUS_EXPR:
- return gen_rtx_MINUS (mode, op0, op1);
+ return simplify_gen_binary (MINUS, mode, op0, op1);
case MULT_EXPR:
- return gen_rtx_MULT (mode, op0, op1);
+ return simplify_gen_binary (MULT, mode, op0, op1);
case RDIV_EXPR:
case TRUNC_DIV_EXPR:
case EXACT_DIV_EXPR:
if (unsignedp)
- return gen_rtx_UDIV (mode, op0, op1);
+ return simplify_gen_binary (UDIV, mode, op0, op1);
else
- return gen_rtx_DIV (mode, op0, op1);
+ return simplify_gen_binary (DIV, mode, op0, op1);
case TRUNC_MOD_EXPR:
- if (unsignedp)
- return gen_rtx_UMOD (mode, op0, op1);
- else
- return gen_rtx_MOD (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
case FLOOR_DIV_EXPR:
if (unsignedp)
- return gen_rtx_UDIV (mode, op0, op1);
+ return simplify_gen_binary (UDIV, mode, op0, op1);
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = floor_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case FLOOR_MOD_EXPR:
if (unsignedp)
- return gen_rtx_UMOD (mode, op0, op1);
+ return simplify_gen_binary (UMOD, mode, op0, op1);
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = floor_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
case CEIL_DIV_EXPR:
if (unsignedp)
{
- rtx div = gen_rtx_UDIV (mode, op0, op1);
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = ceil_udiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = ceil_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case CEIL_MOD_EXPR:
if (unsignedp)
{
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = ceil_udiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = ceil_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
case ROUND_DIV_EXPR:
if (unsignedp)
{
- rtx div = gen_rtx_UDIV (mode, op0, op1);
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = round_udiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = round_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case ROUND_MOD_EXPR:
if (unsignedp)
{
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = round_udiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = round_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
case LSHIFT_EXPR:
- return gen_rtx_ASHIFT (mode, op0, op1);
+ return simplify_gen_binary (ASHIFT, mode, op0, op1);
case RSHIFT_EXPR:
if (unsignedp)
- return gen_rtx_LSHIFTRT (mode, op0, op1);
+ return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
else
- return gen_rtx_ASHIFTRT (mode, op0, op1);
+ return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
case LROTATE_EXPR:
- return gen_rtx_ROTATE (mode, op0, op1);
+ return simplify_gen_binary (ROTATE, mode, op0, op1);
case RROTATE_EXPR:
- return gen_rtx_ROTATERT (mode, op0, op1);
+ return simplify_gen_binary (ROTATERT, mode, op0, op1);
case MIN_EXPR:
- if (unsignedp)
- return gen_rtx_UMIN (mode, op0, op1);
- else
- return gen_rtx_SMIN (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
case MAX_EXPR:
- if (unsignedp)
- return gen_rtx_UMAX (mode, op0, op1);
- else
- return gen_rtx_SMAX (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
case BIT_AND_EXPR:
case TRUTH_AND_EXPR:
- return gen_rtx_AND (mode, op0, op1);
+ return simplify_gen_binary (AND, mode, op0, op1);
case BIT_IOR_EXPR:
case TRUTH_OR_EXPR:
- return gen_rtx_IOR (mode, op0, op1);
+ return simplify_gen_binary (IOR, mode, op0, op1);
case BIT_XOR_EXPR:
case TRUTH_XOR_EXPR:
- return gen_rtx_XOR (mode, op0, op1);
+ return simplify_gen_binary (XOR, mode, op0, op1);
case TRUTH_ANDIF_EXPR:
return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
case TRUTH_NOT_EXPR:
- return gen_rtx_EQ (mode, op0, const0_rtx);
+ return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
case LT_EXPR:
- if (unsignedp)
- return gen_rtx_LTU (mode, op0, op1);
- else
- return gen_rtx_LT (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
+ op0, op1);
case LE_EXPR:
- if (unsignedp)
- return gen_rtx_LEU (mode, op0, op1);
- else
- return gen_rtx_LE (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
+ op0, op1);
case GT_EXPR:
- if (unsignedp)
- return gen_rtx_GTU (mode, op0, op1);
- else
- return gen_rtx_GT (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
+ op0, op1);
case GE_EXPR:
- if (unsignedp)
- return gen_rtx_GEU (mode, op0, op1);
- else
- return gen_rtx_GE (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
+ op0, op1);
case EQ_EXPR:
- return gen_rtx_EQ (mode, op0, op1);
+ return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
case NE_EXPR:
- return gen_rtx_NE (mode, op0, op1);
+ return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
case UNORDERED_EXPR:
- return gen_rtx_UNORDERED (mode, op0, op1);
+ return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
case ORDERED_EXPR:
- return gen_rtx_ORDERED (mode, op0, op1);
+ return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
case UNLT_EXPR:
- return gen_rtx_UNLT (mode, op0, op1);
+ return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
case UNLE_EXPR:
- return gen_rtx_UNLE (mode, op0, op1);
+ return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
case UNGT_EXPR:
- return gen_rtx_UNGT (mode, op0, op1);
+ return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
case UNGE_EXPR:
- return gen_rtx_UNGE (mode, op0, op1);
+ return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
case UNEQ_EXPR:
- return gen_rtx_UNEQ (mode, op0, op1);
+ return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
case LTGT_EXPR:
- return gen_rtx_LTGT (mode, op0, op1);
+ return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
case COND_EXPR:
return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
case CONJ_EXPR:
if (GET_CODE (op0) == CONCAT)
return gen_rtx_CONCAT (mode, XEXP (op0, 0),
- gen_rtx_NEG (GET_MODE_INNER (mode),
- XEXP (op0, 1)));
+ simplify_gen_unary (NEG, GET_MODE_INNER (mode),
+ XEXP (op0, 1),
+ GET_MODE_INNER (mode)));
else
{
enum machine_mode imode = GET_MODE_INNER (mode);
if (SSA_NAME_IS_DEFAULT_DEF (exp)
&& TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
{
- rtx incoming = DECL_INCOMING_RTL (SSA_NAME_VAR (exp));
- if (incoming
- && GET_MODE (incoming) != BLKmode
- && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
- || (MEM_P (incoming)
- && REG_P (XEXP (incoming, 0))
- && HARD_REGISTER_P (XEXP (incoming, 0)))))
- {
- op0 = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));
- ENTRY_VALUE_EXP (op0) = incoming;
- goto adjust_mode;
- }
+ op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
+ if (op0)
+ goto adjust_mode;
op0 = expand_debug_expr (SSA_NAME_VAR (exp));
- if (!op0)
- return NULL;
- goto adjust_mode;
+ if (op0)
+ goto adjust_mode;
}
return NULL;
}
case VEC_UNPACK_LO_EXPR:
case VEC_WIDEN_MULT_HI_EXPR:
case VEC_WIDEN_MULT_LO_EXPR:
+ case VEC_WIDEN_LSHIFT_HI_EXPR:
+ case VEC_WIDEN_LSHIFT_LO_EXPR:
+ case VEC_PERM_EXPR:
return NULL;
/* Misc codes. */
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
- else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
- op1 = gen_rtx_ZERO_EXTEND (mode, op1);
- else
- op1 = gen_rtx_SIGN_EXTEND (mode, op1);
- op0 = gen_rtx_MULT (mode, op0, op1);
- return gen_rtx_PLUS (mode, op0, op2);
+ op0
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
+ inner_mode);
+ op1
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 1)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
+ inner_mode);
+ op0 = simplify_gen_binary (MULT, mode, op0, op1);
+ return simplify_gen_binary (PLUS, mode, op0, op2);
}
return NULL;
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- enum machine_mode inner_mode = GET_MODE (op0);
+ inner_mode = GET_MODE (op0);
if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
else
op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
else
op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
- op0 = gen_rtx_MULT (mode, op0, op1);
+ op0 = simplify_gen_binary (MULT, mode, op0, op1);
if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
return op0;
else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
- return gen_rtx_PLUS (mode, op0, op2);
+ return simplify_gen_binary (PLUS, mode, op0, op2);
else
- return gen_rtx_MINUS (mode, op2, op0);
+ return simplify_gen_binary (MINUS, mode, op2, op0);
}
return NULL;
case WIDEN_SUM_EXPR:
+ case WIDEN_LSHIFT_EXPR:
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
- else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
- return gen_rtx_PLUS (mode, op0, op1);
+ op0
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
+ inner_mode);
+ return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
+ ? ASHIFT : PLUS, mode, op0, op1);
}
return NULL;
case FMA_EXPR:
- return gen_rtx_FMA (mode, op0, op1, op2);
+ return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
default:
flag_unsupported:
}
}
+/* Return an RTX equivalent to the source bind value of the tree expression
+ EXP. */
+
+static rtx
+expand_debug_source_expr (tree exp)
+{
+ rtx op0 = NULL_RTX;
+ enum machine_mode mode = VOIDmode, inner_mode;
+
+ switch (TREE_CODE (exp))
+ {
+ case PARM_DECL:
+ {
+ mode = DECL_MODE (exp);
+ op0 = expand_debug_parm_decl (exp);
+ if (op0)
+ break;
+ /* See if this isn't an argument that has been completely
+ optimized out. */
+ if (!DECL_RTL_SET_P (exp)
+ && !DECL_INCOMING_RTL (exp)
+ && DECL_ABSTRACT_ORIGIN (current_function_decl))
+ {
+ tree aexp = exp;
+ if (DECL_ABSTRACT_ORIGIN (exp))
+ aexp = DECL_ABSTRACT_ORIGIN (exp);
+ if (DECL_CONTEXT (aexp)
+ == DECL_ABSTRACT_ORIGIN (current_function_decl))
+ {
+ VEC(tree, gc) **debug_args;
+ unsigned int ix;
+ tree ddecl;
+#ifdef ENABLE_CHECKING
+ tree parm;
+ for (parm = DECL_ARGUMENTS (current_function_decl);
+ parm; parm = DECL_CHAIN (parm))
+ gcc_assert (parm != exp
+ && DECL_ABSTRACT_ORIGIN (parm) != aexp);
+#endif
+ debug_args = decl_debug_args_lookup (current_function_decl);
+ if (debug_args != NULL)
+ {
+ for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
+ ix += 2)
+ if (ddecl == aexp)
+ return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
+ }
+ }
+ }
+ break;
+ }
+ default:
+ break;
+ }
+
+ if (op0 == NULL_RTX)
+ return NULL_RTX;
+
+ inner_mode = GET_MODE (op0);
+ if (mode == inner_mode)
+ return op0;
+
+ if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
+ {
+ if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
+ else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
+ }
+ else if (FLOAT_MODE_P (mode))
+ gcc_unreachable ();
+ else if (FLOAT_MODE_P (inner_mode))
+ {
+ if (TYPE_UNSIGNED (TREE_TYPE (exp)))
+ op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
+ }
+ else if (CONSTANT_P (op0)
+ || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
+ op0 = simplify_gen_subreg (mode, op0, inner_mode,
+ subreg_lowpart_offset (mode, inner_mode));
+ else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
+ op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
+ else
+ op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
+
+ return op0;
+}
+
/* Expand the _LOCs in debug insns. We run this after expanding all
regular insns, so that any variables referenced in the function
will have their DECL_RTLs set. */
val = NULL_RTX;
else
{
- val = expand_debug_expr (value);
+ if (INSN_VAR_LOCATION_STATUS (insn)
+ == VAR_INIT_STATUS_UNINITIALIZED)
+ val = expand_debug_source_expr (value);
+ else
+ val = expand_debug_expr (value);
gcc_assert (last == get_last_insn ());
}
set_curr_insn_source_location (sloc);
set_curr_insn_block (sblock);
}
+ else if (gimple_debug_source_bind_p (stmt))
+ {
+ location_t sloc = get_curr_insn_source_location ();
+ tree sblock = get_curr_insn_block ();
+ tree var = gimple_debug_source_bind_get_var (stmt);
+ tree value = gimple_debug_source_bind_get_value (stmt);
+ rtx val;
+ enum machine_mode mode;
+
+ last = get_last_insn ();
+
+ set_curr_insn_source_location (gimple_location (stmt));
+ set_curr_insn_block (gimple_block (stmt));
+
+ mode = DECL_MODE (var);
+
+ val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
+ VAR_INIT_STATUS_UNINITIALIZED);
+
+ emit_debug_insn (val);
+
+ if (dump_file && (dump_flags & TDF_DETAILS))
+ {
+ /* We can't dump the insn with a TREE where an RTX
+ is expected. */
+ PAT_VAR_LOCATION_LOC (val) = const0_rtx;
+ maybe_dump_rtl_for_gimple_stmt (stmt, last);
+ PAT_VAR_LOCATION_LOC (val) = (rtx)value;
+ }
+
+ set_curr_insn_source_location (sloc);
+ set_curr_insn_block (sblock);
+ }
else
{
if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
}
}
+ /* If we have a class containing differently aligned pointers
+ we need to merge those into the corresponding RTL pointer
+ alignment. */
+ for (i = 1; i < num_ssa_names; i++)
+ {
+ tree name = ssa_name (i);
+ int part;
+ rtx r;
+
+ if (!name
+ || !POINTER_TYPE_P (TREE_TYPE (name))
+ /* We might have generated new SSA names in
+ update_alias_info_with_stack_vars. They will have a NULL
+ defining statements, and won't be part of the partitioning,
+ so ignore those. */
+ || !SSA_NAME_DEF_STMT (name))
+ continue;
+ part = var_to_partition (SA.map, name);
+ if (part == NO_PARTITION)
+ continue;
+ r = SA.partition_to_pseudo[part];
+ if (REG_P (r))
+ mark_reg_pointer (r, get_pointer_alignment (name));
+ }
+
/* If this function is `main', emit a call to `__main'
to run global initializers, etc. */
if (DECL_NAME (current_function_decl)
PROP_ssa | PROP_trees, /* properties_destroyed */
TODO_verify_ssa | TODO_verify_flow
| TODO_verify_stmts, /* todo_flags_start */
- TODO_dump_func
- | TODO_ggc_collect /* todo_flags_finish */
+ TODO_ggc_collect /* todo_flags_finish */
}
};