/* The Variable. */
tree decl;
- /* The offset of the variable. During partitioning, this is the
- offset relative to the partition. After partitioning, this
- is relative to the stack frame. */
- HOST_WIDE_INT offset;
-
/* Initially, the size of the variable. Later, the size of the partition,
   if this variable becomes its partition's representative. */
HOST_WIDE_INT size;
v = &stack_vars[stack_vars_num];
v->decl = decl;
- v->offset = 0;
v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
/* Ensure that all variables have size, so that &a != &b for any two
variables that are simultaneously live. */
to elements will conflict. In case of unions we have
to be careful as type based aliasing rules may say
access to the same memory does not conflict. So play
- safe and add a conflict in this case. */
- || contains_union)
+ safe and add a conflict in this case when
+ -fstrict-aliasing is used. */
+ || (contains_union && flag_strict_aliasing))
add_stack_var_conflict (i, j);
}
}
return (int)largeb - (int)largea;
/* Secondary compare on size, decreasing.  */
- if (sizea < sizeb)
- return -1;
if (sizea > sizeb)
+ return -1;
+ if (sizea < sizeb)
return 1;
/* Tertiary compare on true alignment, decreasing. */
/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
partitioning algorithm. Partitions A and B are known to be non-conflicting.
- Merge them into a single partition A.
-
- At the same time, add OFFSET to all variables in partition B. At the end
- of the partitioning process we've have a nice block easy to lay out within
- the stack frame. */
+ Merge them into a single partition A. */
static void
-union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
+union_stack_vars (size_t a, size_t b)
{
- size_t i, last;
struct stack_var *vb = &stack_vars[b];
bitmap_iterator bi;
unsigned u;
- /* Update each element of partition B with the given offset,
- and merge them into partition A. */
- for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
- {
- stack_vars[i].offset += offset;
- stack_vars[i].representative = a;
- }
- stack_vars[last].next = stack_vars[a].next;
+ gcc_assert (stack_vars[b].next == EOC);
+ /* Add B to A's partition. */
+ stack_vars[b].next = stack_vars[a].next;
+ stack_vars[b].representative = a;
stack_vars[a].next = b;
/* Update the required alignment of partition A to account for B. */
partitions constrained by the interference graph. The overall
algorithm used is as follows:
- Sort the objects by size.
+ Sort the objects by size in descending order.
For each object A {
S = size(A)
O = 0
loop {
Look for the largest non-conflicting object B with size <= S.
UNION (A, B)
- offset(B) = O
- O += size(B)
- S -= size(B)
}
}
*/
for (si = 0; si < n; ++si)
{
size_t i = stack_vars_sorted[si];
- HOST_WIDE_INT isize = stack_vars[i].size;
unsigned int ialign = stack_vars[i].alignb;
- HOST_WIDE_INT offset = 0;
- for (sj = si; sj-- > 0; )
+ /* Ignore objects that aren't partition representatives. If we
+ see a var that is not a partition representative, it must
+ have been merged earlier. */
+ if (stack_vars[i].representative != i)
+ continue;
+
+ for (sj = si + 1; sj < n; ++sj)
{
size_t j = stack_vars_sorted[sj];
- HOST_WIDE_INT jsize = stack_vars[j].size;
unsigned int jalign = stack_vars[j].alignb;
/* Ignore objects that aren't partition representatives. */
if (stack_vars[j].representative != j)
continue;
- /* Ignore objects too large for the remaining space. */
- if (isize < jsize)
- continue;
-
/* Ignore conflicting objects. */
if (stack_var_conflict_p (i, j))
continue;
!= (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
continue;
- /* Refine the remaining space check to include alignment. */
- if (offset & (jalign - 1))
- {
- HOST_WIDE_INT toff = offset;
- toff += jalign - 1;
- toff &= -(HOST_WIDE_INT)jalign;
- if (isize - (toff - offset) < jsize)
- continue;
-
- isize -= toff - offset;
- offset = toff;
- }
-
/* UNION the objects.  */
- union_stack_vars (i, j, offset);
-
- isize -= jsize;
- if (isize == 0)
- break;
+ union_stack_vars (i, j);
}
}
{
fputc ('\t', dump_file);
print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
- fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
- stack_vars[j].offset);
}
+ fputc ('\n', dump_file);
}
}
partition. */
for (j = i; j != EOC; j = stack_vars[j].next)
{
- gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
expand_one_stack_var_at (stack_vars[j].decl,
base, base_align,
- stack_vars[j].offset + offset);
+ offset);
}
}
static void
expand_call_stmt (gimple stmt)
{
- tree exp, decl, lhs = gimple_call_lhs (stmt);
+ tree exp, decl, lhs;
bool builtin_p;
size_t i;
+ if (gimple_call_internal_p (stmt))
+ {
+ expand_internal_call (stmt);
+ return;
+ }
+
exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
- CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
+ if (decl
+ && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
+ && DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA)
+ CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);
+ else
+ CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
SET_EXPR_LOCATION (exp, gimple_location (stmt));
TREE_BLOCK (exp) = gimple_block (stmt);
+ lhs = gimple_call_lhs (stmt);
if (lhs)
expand_assignment (lhs, exp, false);
else
{
rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
+ enum machine_mode inner_mode = VOIDmode;
int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
addr_space_t as;
unary:
case tcc_unary:
+ inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
if (!op0)
return NULL_RTX;
|| !TREE_STATIC (exp)
|| !DECL_NAME (exp)
|| DECL_HARD_REGISTER (exp)
+ || DECL_IN_CONSTANT_POOL (exp)
|| mode == VOIDmode)
return NULL;
case NOP_EXPR:
case CONVERT_EXPR:
{
- enum machine_mode inner_mode = GET_MODE (op0);
+ inner_mode = GET_MODE (op0);
if (mode == inner_mode)
return op0;
else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
: unsignedp)
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
+ op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
+ op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
return op0;
}
/* Don't use offset_address here, we don't need a
recognizable address, and we don't want to generate
code. */
- op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1));
+ op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
+ op0, op1));
}
if (MEM_P (op0))
}
case ABS_EXPR:
- return gen_rtx_ABS (mode, op0);
+ return simplify_gen_unary (ABS, mode, op0, mode);
case NEGATE_EXPR:
- return gen_rtx_NEG (mode, op0);
+ return simplify_gen_unary (NEG, mode, op0, mode);
case BIT_NOT_EXPR:
- return gen_rtx_NOT (mode, op0);
+ return simplify_gen_unary (NOT, mode, op0, mode);
case FLOAT_EXPR:
- if (unsignedp)
- return gen_rtx_UNSIGNED_FLOAT (mode, op0);
- else
- return gen_rtx_FLOAT (mode, op0);
+ return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? UNSIGNED_FLOAT : FLOAT, mode, op0,
+ inner_mode);
case FIX_TRUNC_EXPR:
- if (unsignedp)
- return gen_rtx_UNSIGNED_FIX (mode, op0);
- else
- return gen_rtx_FIX (mode, op0);
+ return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
+ inner_mode);
case POINTER_PLUS_EXPR:
/* For the rare target where pointers are not the same size as
&& GET_MODE (op0) != GET_MODE (op1))
{
if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1)))
- op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1);
+ op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
+ GET_MODE (op1));
else
/* We always sign-extend, regardless of the signedness of
the operand, because the operand is always unsigned
here even if the original C expression is signed. */
- op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1);
+ op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
+ GET_MODE (op1));
}
/* Fall through. */
case PLUS_EXPR:
- return gen_rtx_PLUS (mode, op0, op1);
+ return simplify_gen_binary (PLUS, mode, op0, op1);
case MINUS_EXPR:
- return gen_rtx_MINUS (mode, op0, op1);
+ return simplify_gen_binary (MINUS, mode, op0, op1);
case MULT_EXPR:
- return gen_rtx_MULT (mode, op0, op1);
+ return simplify_gen_binary (MULT, mode, op0, op1);
case RDIV_EXPR:
case TRUNC_DIV_EXPR:
case EXACT_DIV_EXPR:
if (unsignedp)
- return gen_rtx_UDIV (mode, op0, op1);
+ return simplify_gen_binary (UDIV, mode, op0, op1);
else
- return gen_rtx_DIV (mode, op0, op1);
+ return simplify_gen_binary (DIV, mode, op0, op1);
case TRUNC_MOD_EXPR:
- if (unsignedp)
- return gen_rtx_UMOD (mode, op0, op1);
- else
- return gen_rtx_MOD (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);
case FLOOR_DIV_EXPR:
if (unsignedp)
- return gen_rtx_UDIV (mode, op0, op1);
+ return simplify_gen_binary (UDIV, mode, op0, op1);
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = floor_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case FLOOR_MOD_EXPR:
if (unsignedp)
- return gen_rtx_UMOD (mode, op0, op1);
+ return simplify_gen_binary (UMOD, mode, op0, op1);
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = floor_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
case CEIL_DIV_EXPR:
if (unsignedp)
{
- rtx div = gen_rtx_UDIV (mode, op0, op1);
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = ceil_udiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = ceil_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case CEIL_MOD_EXPR:
if (unsignedp)
{
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = ceil_udiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = ceil_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
case ROUND_DIV_EXPR:
if (unsignedp)
{
- rtx div = gen_rtx_UDIV (mode, op0, op1);
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = round_udiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
else
{
- rtx div = gen_rtx_DIV (mode, op0, op1);
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx div = simplify_gen_binary (DIV, mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = round_sdiv_adjust (mode, mod, op1);
- return gen_rtx_PLUS (mode, div, adj);
+ return simplify_gen_binary (PLUS, mode, div, adj);
}
case ROUND_MOD_EXPR:
if (unsignedp)
{
- rtx mod = gen_rtx_UMOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
rtx adj = round_udiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
else
{
- rtx mod = gen_rtx_MOD (mode, op0, op1);
+ rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
rtx adj = round_sdiv_adjust (mode, mod, op1);
- adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1));
- return gen_rtx_PLUS (mode, mod, adj);
+ adj = simplify_gen_unary (NEG, mode,
+ simplify_gen_binary (MULT, mode, adj, op1),
+ mode);
+ return simplify_gen_binary (PLUS, mode, mod, adj);
}
case LSHIFT_EXPR:
- return gen_rtx_ASHIFT (mode, op0, op1);
+ return simplify_gen_binary (ASHIFT, mode, op0, op1);
case RSHIFT_EXPR:
if (unsignedp)
- return gen_rtx_LSHIFTRT (mode, op0, op1);
+ return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
else
- return gen_rtx_ASHIFTRT (mode, op0, op1);
+ return simplify_gen_binary (ASHIFTRT, mode, op0, op1);
case LROTATE_EXPR:
- return gen_rtx_ROTATE (mode, op0, op1);
+ return simplify_gen_binary (ROTATE, mode, op0, op1);
case RROTATE_EXPR:
- return gen_rtx_ROTATERT (mode, op0, op1);
+ return simplify_gen_binary (ROTATERT, mode, op0, op1);
case MIN_EXPR:
- if (unsignedp)
- return gen_rtx_UMIN (mode, op0, op1);
- else
- return gen_rtx_SMIN (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);
case MAX_EXPR:
- if (unsignedp)
- return gen_rtx_UMAX (mode, op0, op1);
- else
- return gen_rtx_SMAX (mode, op0, op1);
+ return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);
case BIT_AND_EXPR:
case TRUTH_AND_EXPR:
- return gen_rtx_AND (mode, op0, op1);
+ return simplify_gen_binary (AND, mode, op0, op1);
case BIT_IOR_EXPR:
case TRUTH_OR_EXPR:
- return gen_rtx_IOR (mode, op0, op1);
+ return simplify_gen_binary (IOR, mode, op0, op1);
case BIT_XOR_EXPR:
case TRUTH_XOR_EXPR:
- return gen_rtx_XOR (mode, op0, op1);
+ return simplify_gen_binary (XOR, mode, op0, op1);
case TRUTH_ANDIF_EXPR:
return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);
return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);
case TRUTH_NOT_EXPR:
- return gen_rtx_EQ (mode, op0, const0_rtx);
+ return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);
case LT_EXPR:
- if (unsignedp)
- return gen_rtx_LTU (mode, op0, op1);
- else
- return gen_rtx_LT (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
+ op0, op1);
case LE_EXPR:
- if (unsignedp)
- return gen_rtx_LEU (mode, op0, op1);
- else
- return gen_rtx_LE (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
+ op0, op1);
case GT_EXPR:
- if (unsignedp)
- return gen_rtx_GTU (mode, op0, op1);
- else
- return gen_rtx_GT (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
+ op0, op1);
case GE_EXPR:
- if (unsignedp)
- return gen_rtx_GEU (mode, op0, op1);
- else
- return gen_rtx_GE (mode, op0, op1);
+ return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
+ op0, op1);
case EQ_EXPR:
- return gen_rtx_EQ (mode, op0, op1);
+ return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);
case NE_EXPR:
- return gen_rtx_NE (mode, op0, op1);
+ return simplify_gen_relational (NE, mode, inner_mode, op0, op1);
case UNORDERED_EXPR:
- return gen_rtx_UNORDERED (mode, op0, op1);
+ return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);
case ORDERED_EXPR:
- return gen_rtx_ORDERED (mode, op0, op1);
+ return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);
case UNLT_EXPR:
- return gen_rtx_UNLT (mode, op0, op1);
+ return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);
case UNLE_EXPR:
- return gen_rtx_UNLE (mode, op0, op1);
+ return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);
case UNGT_EXPR:
- return gen_rtx_UNGT (mode, op0, op1);
+ return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);
case UNGE_EXPR:
- return gen_rtx_UNGE (mode, op0, op1);
+ return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);
case UNEQ_EXPR:
- return gen_rtx_UNEQ (mode, op0, op1);
+ return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);
case LTGT_EXPR:
- return gen_rtx_LTGT (mode, op0, op1);
+ return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);
case COND_EXPR:
return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);
case CONJ_EXPR:
if (GET_CODE (op0) == CONCAT)
return gen_rtx_CONCAT (mode, XEXP (op0, 0),
- gen_rtx_NEG (GET_MODE_INNER (mode),
- XEXP (op0, 1)));
+ simplify_gen_unary (NEG, GET_MODE_INNER (mode),
+ XEXP (op0, 1),
+ GET_MODE_INNER (mode)));
else
{
enum machine_mode imode = GET_MODE_INNER (mode);
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
- else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
- op1 = gen_rtx_ZERO_EXTEND (mode, op1);
- else
- op1 = gen_rtx_SIGN_EXTEND (mode, op1);
- op0 = gen_rtx_MULT (mode, op0, op1);
- return gen_rtx_PLUS (mode, op0, op2);
+ op0
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
+ inner_mode);
+ op1
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 1)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
+ inner_mode);
+ op0 = simplify_gen_binary (MULT, mode, op0, op1);
+ return simplify_gen_binary (PLUS, mode, op0, op2);
}
return NULL;
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- enum machine_mode inner_mode = GET_MODE (op0);
+ inner_mode = GET_MODE (op0);
if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
else
op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
else
op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
- op0 = gen_rtx_MULT (mode, op0, op1);
+ op0 = simplify_gen_binary (MULT, mode, op0, op1);
if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
return op0;
else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
- return gen_rtx_PLUS (mode, op0, op2);
+ return simplify_gen_binary (PLUS, mode, op0, op2);
else
- return gen_rtx_MINUS (mode, op2, op0);
+ return simplify_gen_binary (MINUS, mode, op2, op0);
}
return NULL;
if (SCALAR_INT_MODE_P (GET_MODE (op0))
&& SCALAR_INT_MODE_P (mode))
{
- if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
- op0 = gen_rtx_ZERO_EXTEND (mode, op0);
- else
- op0 = gen_rtx_SIGN_EXTEND (mode, op0);
- return gen_rtx_PLUS (mode, op0, op1);
+ op0
+ = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
+ 0)))
+ ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
+ inner_mode);
+ return simplify_gen_binary (PLUS, mode, op0, op1);
}
return NULL;
case FMA_EXPR:
- return gen_rtx_FMA (mode, op0, op1, op2);
+ return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
default:
flag_unsupported: