{
int mode;
- for (mode = 0; mode < MAX_MACHINE_MODE; mode++)
+ for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
mode_to_const_map[mode] =
mode_to_load_map[mode] =
mode_to_store_map[mode] = neverneverland;
#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
- mode_to_const_map[(enum machine_mode) SYM] = CONST; \
- mode_to_load_map[(enum machine_mode) SYM] = LOAD; \
- mode_to_store_map[(enum machine_mode) SYM] = STORE;
+ mode_to_const_map[(int) SYM] = CONST; \
+ mode_to_load_map[(int) SYM] = LOAD; \
+ mode_to_store_map[(int) SYM] = STORE;
#include "modemap.def"
#undef DEF_MODEMAP
&& ((code = can_extend_p (to_mode, word_mode, unsignedp))
!= CODE_FOR_nothing))
{
+ if (GET_CODE (to) == REG)
+ emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
convert_move (gen_lowpart (word_mode, to), from, unsignedp);
emit_unop_insn (code, to,
gen_lowpart (word_mode, to), equiv_code);
{
register rtx temp;
- if (GET_MODE (x) != VOIDmode)
- oldmode = GET_MODE (x);
-
/* If FROM is a SUBREG that indicates that we have already done at least
the required extension, strip it. */
&& SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
x = gen_lowpart (mode, x);
+ if (GET_MODE (x) != VOIDmode)
+ oldmode = GET_MODE (x);
+
if (mode == oldmode)
return x;
/* We can do this with a gen_lowpart if both desired and current modes
are integer, and this is either a constant integer, a register, or a
- non-volatile MEM. Except for the constant case, we must be narrowing
- the operand. */
+ non-volatile MEM. Except for the constant case where MODE is no
+ wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
- if (GET_CODE (x) == CONST_INT
+ if ((GET_CODE (x) == CONST_INT
+ && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
|| (GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_CLASS (oldmode) == MODE_INT
&& (GET_CODE (x) == CONST_DOUBLE
&& ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
&& direct_load[(int) mode])
|| GET_CODE (x) == REG)))))
- return gen_lowpart (mode, x);
+ {
+ /* ?? If we don't know OLDMODE, we have to assume here that
+ X does not need sign- or zero-extension. This may not be
+ the case, but it's the best we can do. */
+ if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
+ && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
+ {
+ HOST_WIDE_INT val = INTVAL (x);
+ int width = GET_MODE_BITSIZE (oldmode);
+
+ /* We must sign or zero-extend in this case. Start by
+ zero-extending, then sign extend if we need to. */
+ val &= ((HOST_WIDE_INT) 1 << width) - 1;
+ if (! unsignedp
+ && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
+ val |= (HOST_WIDE_INT) (-1) << width;
+
+ return GEN_INT (val);
+ }
+
+ return gen_lowpart (mode, x);
+ }
temp = gen_reg_rtx (mode);
convert_move (temp, x, unsignedp);
/* See if the machine can do this with a load multiple insn. */
#ifdef HAVE_load_multiple
- last = get_last_insn ();
- pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
- GEN_INT (nregs));
- if (pat)
+ if (HAVE_load_multiple)
{
- emit_insn (pat);
- return;
+ last = get_last_insn ();
+ pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
+ GEN_INT (nregs));
+ if (pat)
+ {
+ emit_insn (pat);
+ return;
+ }
+ else
+ delete_insns_since (last);
}
- else
- delete_insns_since (last);
#endif
for (i = 0; i < nregs; i++)
/* See if the machine can do this with a store multiple insn. */
#ifdef HAVE_store_multiple
- last = get_last_insn ();
- pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
- GEN_INT (nregs));
- if (pat)
+ if (HAVE_store_multiple)
{
- emit_insn (pat);
- return;
+ last = get_last_insn ();
+ pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
+ GEN_INT (nregs));
+ if (pat)
+ {
+ emit_insn (pat);
+ return;
+ }
+ else
+ delete_insns_since (last);
}
- else
- delete_insns_since (last);
#endif
for (i = 0; i < nregs; i++)
the mode, not to change the address. */
if (stack)
{
+ /* Note that the real part always precedes the imag part in memory
+ regardless of machine's endianness. */
#ifdef STACK_GROWS_DOWNWARD
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
(gen_rtx (MEM, submode, (XEXP (x, 0))),
- gen_highpart (submode, y)));
+ gen_imagpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
(gen_rtx (MEM, submode, (XEXP (x, 0))),
- gen_lowpart (submode, y)));
+ gen_realpart (submode, y)));
#else
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
(gen_rtx (MEM, submode, (XEXP (x, 0))),
- gen_lowpart (submode, y)));
+ gen_realpart (submode, y)));
emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
(gen_rtx (MEM, submode, (XEXP (x, 0))),
- gen_highpart (submode, y)));
+ gen_imagpart (submode, y)));
#endif
}
else
(gen_lowpart (submode, x), gen_lowpart (submode, y)));
}
- group_insns (prev);
+ if (GET_CODE (x) != CONCAT)
+ /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
+ each with a separate pseudo as destination.
+ It's not correct for flow to treat them as a unit. */
+ group_insns (prev);
return get_last_insn ();
}
tree dest_innermost;
bc_expand_expr (from);
- bc_emit_instruction (dup);
+ bc_emit_instruction (duplicate);
dest_innermost = bc_expand_address (to);
int unsignedp;
int volatilep = 0;
tree tem;
+ int alignment;
push_temp_slots ();
tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
if (mode1 == VOIDmode && want_value)
tem = stabilize_reference (tem);
+ alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
if (offset != 0)
{
to_rtx = change_address (to_rtx, VOIDmode,
gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
force_reg (Pmode, offset_rtx)));
+ /* If we have a variable offset, the known alignment
+ is only that of the innermost structure containing the field.
+ (Actually, we could sometimes do better by using the
+ align of an element of the innermost array, but no need.) */
+ if (TREE_CODE (to) == COMPONENT_REF
+ || TREE_CODE (to) == BIT_FIELD_REF)
+ alignment
+ = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
}
if (volatilep)
{
: VOIDmode),
unsignedp,
/* Required alignment of containing datum. */
- TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
+ alignment,
int_size_in_bytes (TREE_TYPE (tem)));
preserve_temp_slots (result);
free_temp_slots ();
call the function before we start to compute the lhs.
This is needed for correct code for cases such as
val = setjmp (buf) on machines where reference to val
- requires loading up part of an address in a separate insn. */
- if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
+ requires loading up part of an address in a separate insn.
+
+ Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
+ a promoted variable where the zero- or sign- extension needs to be done.
+ Handling this in the normal way is safe because no computation is done
+ before the call. */
+ if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
+ && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
{
rtx value;
rtx from_rtx, size;
push_temp_slots ();
- from_rtx = expr_size (from);
- size = expand_expr (from, NULL_RTX, VOIDmode, 0);
+ size = expr_size (from);
+ from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
#ifdef TARGET_MEM_FUNCTIONS
emit_library_call (memcpy_libfunc, 0,
expression. */
{
temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
+
+ /* If TEMP is a VOIDmode constant, use convert_modes to make
+ sure that we properly convert it. */
+ if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
+ temp = convert_modes (GET_MODE (SUBREG_REG (target)),
+ TYPE_MODE (TREE_TYPE (exp)), temp,
+ SUBREG_PROMOTED_UNSIGNED_P (target));
+
convert_move (SUBREG_REG (target), temp,
SUBREG_PROMOTED_UNSIGNED_P (target));
return want_value ? temp : NULL_RTX;
dont_return_target = 1;
}
+ /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
+ the same as that of TARGET, adjust the constant. This is needed, for
+ example, in case it is a CONST_DOUBLE and we want only a word-sized
+ value. */
+ if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
+ && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
+ temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
+ temp, TREE_UNSIGNED (TREE_TYPE (exp)));
+
/* If value was not generated in the target, store it there.
Convert the value to TARGET's type first if nec. */
/* Compute the size of the data to copy from the string. */
tree copy_size
= size_binop (MIN_EXPR,
- size_binop (CEIL_DIV_EXPR,
- TYPE_SIZE (TREE_TYPE (exp)),
- size_int (BITS_PER_UNIT)),
+ make_tree (sizetype, size),
convert (sizetype,
build_int_2 (TREE_STRING_LENGTH (exp), 0)));
rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
register tree field = TREE_PURPOSE (elt);
register enum machine_mode mode;
int bitsize;
- int bitpos;
+ int bitpos = 0;
int unsignedp;
+ tree pos, constant = 0, offset = 0;
+ rtx to_rtx = target;
/* Just ignore missing fields.
We cleared the whole structure, above,
if (DECL_BIT_FIELD (field))
mode = VOIDmode;
- if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
- /* ??? This case remains to be written. */
- abort ();
+ pos = DECL_FIELD_BITPOS (field);
+ if (TREE_CODE (pos) == INTEGER_CST)
+ constant = pos;
+ else if (TREE_CODE (pos) == PLUS_EXPR
+ && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
+ constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
+ else
+ offset = pos;
- bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
+ if (constant)
+ bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
+
+ if (offset)
+ {
+ rtx offset_rtx;
+
+ if (contains_placeholder_p (offset))
+ offset = build (WITH_RECORD_EXPR, sizetype,
+ offset, exp);
- store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
+ offset = size_binop (FLOOR_DIV_EXPR, offset,
+ size_int (BITS_PER_UNIT));
+
+ offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
+ if (GET_CODE (to_rtx) != MEM)
+ abort ();
+
+ to_rtx
+ = change_address (to_rtx, VOIDmode,
+ gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
+ force_reg (Pmode, offset_rtx)));
+ }
+
+ store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
/* The alignment of TARGET is
at least what its type requires. */
VOIDmode, 0,
else
{
if (index != 0)
- bitpos = (TREE_INT_CST_LOW (index)
+ bitpos = ((TREE_INT_CST_LOW (index) - minelt)
* TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
else
bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
align, total_size);
+ /* Even though we aren't returning target, we need to
+ give it the updated value. */
emit_move_insn (target, object);
- return target;
+ return blk_object;
}
/* If the structure is in a register or if the component
|| (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
{
rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
+
+ /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
+ MODE. */
+ if (mode != VOIDmode && mode != BLKmode
+ && mode != TYPE_MODE (TREE_TYPE (exp)))
+ temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
+
/* Store the value in the bitfield. */
store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
if (value_mode != VOIDmode)
int *punsignedp;
int *pvolatilep;
{
+ tree orig_exp = exp;
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
tree offset = integer_zero_node;
if (integer_zerop (offset))
offset = 0;
+ if (offset != 0 && contains_placeholder_p (offset))
+ offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
+
*pmode = mode;
*poffset = offset;
-#if 0
- /* We aren't finished fixing the callers to really handle nonzero offset. */
- if (offset != 0)
- abort ();
-#endif
-
return exp;
}
\f
enum machine_mode tmode;
enum expand_modifier modifier;
{
+ /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
+ This is static so it will be accessible to our recursive callees. */
+ static tree placeholder_list = 0;
register rtx op0, op1, temp;
tree type = TREE_TYPE (exp);
int unsignedp = TREE_UNSIGNED (type);
/* If we are going to ignore this result, we need only do something
if there is a side-effect somewhere in the expression. If there
- is, short-circuit the most common cases here. */
+ is, short-circuit the most common cases here. Note that we must
+ not call expand_expr with anything but const0_rtx in case this
+ is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
if (ignore)
{
return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
VOIDmode, modifier);
- target = 0, original_target = 0;
+ target = 0;
}
/* If will do cse, generate all results into pseudo registers
return CONST0_RTX (mode);
}
- case FUNCTION_DECL:
case VAR_DECL:
+ /* If a static var's type was incomplete when the decl was written,
+ but the type is complete now, lay out the decl now. */
+ if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
+ && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
+ {
+ push_obstacks_nochange ();
+ end_temporary_allocation ();
+ layout_decl (exp, 0);
+ PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
+ pop_obstacks ();
+ }
+ case FUNCTION_DECL:
case RESULT_DECL:
if (DECL_RTL (exp) == 0)
abort ();
/* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
must be a promoted value. We return a SUBREG of the wanted mode,
- but mark it so that we know that it was already extended. Note
- that `unsignedp' was modified above in this case. */
+ but mark it so that we know that it was already extended. */
if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
&& GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
{
+ enum machine_mode var_mode = mode;
+
+ if (TREE_CODE (type) == INTEGER_TYPE
+ || TREE_CODE (type) == ENUMERAL_TYPE
+ || TREE_CODE (type) == BOOLEAN_TYPE
+ || TREE_CODE (type) == CHAR_TYPE
+ || TREE_CODE (type) == REAL_TYPE
+ || TREE_CODE (type) == POINTER_TYPE
+ || TREE_CODE (type) == OFFSET_TYPE)
+ {
+ PROMOTE_MODE (var_mode, unsignedp, type);
+ }
+
temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
SUBREG_PROMOTED_VAR_P (temp) = 1;
SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
return SAVE_EXPR_RTL (exp);
+ case PLACEHOLDER_EXPR:
+      /* If there is an object on the head of the placeholder list,
+	 see if some object in its references is of type TYPE.  For
+	 further information, see tree.def.  */
+ if (placeholder_list)
+ {
+ tree object;
+ tree old_list = placeholder_list;
+
+ for (object = TREE_PURPOSE (placeholder_list);
+ TREE_TYPE (object) != type
+ && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
+ || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
+ || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
+ || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
+ object = TREE_OPERAND (object, 0))
+ ;
+
+ if (object && TREE_TYPE (object) == type)
+ {
+ /* Expand this object skipping the list entries before
+ it was found in case it is also a PLACEHOLDER_EXPR.
+ In that case, we want to translate it using subsequent
+ entries. */
+ placeholder_list = TREE_CHAIN (placeholder_list);
+ temp = expand_expr (object, original_target, tmode, modifier);
+ placeholder_list = old_list;
+ return temp;
+ }
+ }
+
+ /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
+ abort ();
+
+ case WITH_RECORD_EXPR:
+ /* Put the object on the placeholder list, expand our first operand,
+ and pop the list. */
+ placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
+ placeholder_list);
+ target = expand_expr (TREE_OPERAND (exp, 0), original_target,
+ tmode, modifier);
+ placeholder_list = TREE_CHAIN (placeholder_list);
+ return target;
+
case EXIT_EXPR:
expand_exit_loop_if_false (NULL_PTR,
invert_truthvalue (TREE_OPERAND (exp, 0)));
abort ();
emit_insns (RTL_EXPR_SEQUENCE (exp));
RTL_EXPR_SEQUENCE (exp) = const0_rtx;
+ free_temps_for_rtl_expr (exp);
return RTL_EXPR_RTL (exp);
case CONSTRUCTOR:
tree index_type = TREE_TYPE (index);
int i;
+ if (TREE_CODE (low_bound) != INTEGER_CST
+ && contains_placeholder_p (low_bound))
+ low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);
+
/* Optimize the special-case of a zero lower bound.
We convert the low_bound to sizetype to avoid some problems
tree array_adr = build1 (ADDR_EXPR,
build_pointer_type (variant_type), array);
tree elt;
+ tree size = size_in_bytes (type);
/* Convert the integer argument to a type the same size as a
pointer so the multiply won't overflow spuriously. */
if (TYPE_PRECISION (index_type) != POINTER_SIZE)
index = convert (type_for_size (POINTER_SIZE, 0), index);
+ if (TREE_CODE (size) != INTEGER_CST
+ && contains_placeholder_p (size))
+ size = build (WITH_RECORD_EXPR, sizetype, size, exp);
+
/* Don't think the address has side effects
just because the array does.
(In some cases the address might have side effects,
array_adr,
fold (build (MULT_EXPR,
TYPE_POINTER_TO (variant_type),
- index,
- size_in_bytes (type))))));
+ index, size)))));
/* Volatility, etc., of new expression is same as old
expression. */
int volatilep = 0;
tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
&mode1, &unsignedp, &volatilep);
+ int alignment;
/* If we got back the original object, something is wrong. Perhaps
we are evaluating an expression too early. In any event, don't
op0 = validize_mem (force_const_mem (mode, op0));
}
+ alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
if (offset != 0)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
op0 = change_address (op0, VOIDmode,
gen_rtx (PLUS, Pmode, XEXP (op0, 0),
force_reg (Pmode, offset_rtx)));
+ /* If we have a variable offset, the known alignment
+ is only that of the innermost structure containing the field.
+ (Actually, we could sometimes do better by using the
+ size of an element of the innermost array, but no need.) */
+ if (TREE_CODE (exp) == COMPONENT_REF
+ || TREE_CODE (exp) == BIT_FIELD_REF)
+ alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
+ / BITS_PER_UNIT);
}
/* Don't forget about volatility even if this is a bitfield. */
op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
unsignedp, target, ext_mode, ext_mode,
- TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
+ alignment,
int_size_in_bytes (TREE_TYPE (tem)));
if (mode == BLKmode)
{
if (RTL_EXPR_RTL (exp) == 0)
{
RTL_EXPR_RTL (exp)
- = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
+ = expand_expr (TREE_OPERAND (exp, 0),
+ target ? target : const0_rtx,
+ tmode, modifier);
cleanups_this_call
= tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
/* That's it for this cleanup. */
target = original_target;
temp = gen_label_rtx ();
if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
+ || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
|| (GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER))
target = gen_reg_rtx (mode);
case MIN_EXPR:
target = original_target;
if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
+ || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
|| (GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER))
target = gen_reg_rtx (mode);
treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
but the question is how to recognize those cases. */
+      /* TRUTH_AND_EXPR can have a result whose mode doesn't match
+	 the operands.  If so, don't use our target.  */
case TRUTH_AND_EXPR:
+ if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ subtarget = 0;
case BIT_AND_EXPR:
this_optab = and_optab;
goto binop;
/* See comment above about TRUTH_AND_EXPR; it applies here too. */
case TRUTH_OR_EXPR:
+ if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ subtarget = 0;
case BIT_IOR_EXPR:
this_optab = ior_optab;
goto binop;
case TRUTH_XOR_EXPR:
+ if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ subtarget = 0;
case BIT_XOR_EXPR:
this_optab = xor_optab;
goto binop;
DECL_RTL (slot) = target;
}
-#if 0
- /* I bet this needs to be done, and I bet that it needs to
- be above, inside the else clause. The reason is
- simple, how else is it going to get cleaned up? (mrs)
-
- The reason is probably did not work before, and was
- commented out is because this was re-expanding already
- expanded target_exprs (target == 0 and DECL_RTL (slot)
- != 0) also cleaning them up many times as well. :-( */
-
- /* Since SLOT is not known to the called function
- to belong to its stack frame, we must build an explicit
- cleanup. This case occurs when we must build up a reference
- to pass the reference as an argument. In this case,
- it is very likely that such a reference need not be
- built here. */
-
- if (TREE_OPERAND (exp, 2) == 0)
- TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
- if (TREE_OPERAND (exp, 2))
- cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
- cleanups_this_call);
-#endif
+ /* We set IGNORE when we know that we're already
+ doing this for a cleanup. */
+ if (ignore == 0)
+ {
+ /* Since SLOT is not known to the called function
+ to belong to its stack frame, we must build an explicit
+ cleanup. This case occurs when we must build up a reference
+ to pass the reference as an argument. In this case,
+ it is very likely that such a reference need not be
+ built here. */
+
+ if (TREE_OPERAND (exp, 2) == 0)
+ TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
+ if (TREE_OPERAND (exp, 2))
+ cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
+ cleanups_this_call);
+ }
}
else
{
}
else
{
- op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
+ /* We make sure to pass const0_rtx down if we came in with
+ ignore set, to avoid doing the cleanups twice for something. */
+ op0 = expand_expr (TREE_OPERAND (exp, 0),
+ ignore ? const0_rtx : NULL_RTX, VOIDmode,
(modifier == EXPAND_INITIALIZER
? modifier : EXPAND_CONST_ADDRESS));
emit_move_insn (gen_imagpart (mode, target), op1);
/* Complex construction should appear as a single unit. */
- group_insns (prev);
+ if (GET_CODE (target) != CONCAT)
+ /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
+ each with a separate pseudo as destination.
+ It's not correct for flow to treat them as a unit. */
+ group_insns (prev);
return target;
}
emit_move_insn (imag_t, temp);
/* Conjugate should appear as a single unit */
- group_insns (prev);
+ if (GET_CODE (target) != CONCAT)
+ /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
+ each with a separate pseudo as destination.
+ It's not correct for flow to treat them as a unit. */
+ group_insns (prev);
return target;
}
return const0_rtx;
default:
- return (*lang_expand_expr) (exp, target, tmode, modifier);
+ return (*lang_expand_expr) (exp, original_target, tmode, modifier);
}
/* Here to do an ordinary binary operator, generating an instruction
#ifdef DEBUG_PRINT_CODE
fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
#endif
- bc_emit_instruction (mode_to_const_map[DECL_BIT_FIELD (exp)
+ bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
? SImode
- : TYPE_MODE (TREE_TYPE (exp))],
+ : TYPE_MODE (TREE_TYPE (exp)))],
(HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
return;
SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
TYPE_ALIGN (TREE_TYPE(exp)));
bc_expand_expr (TREE_OPERAND (exp, 0));
- bc_emit_instruction (dup);
+ bc_emit_instruction (duplicate);
bc_load_localaddr (SAVE_EXPR_RTL (exp));
bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
lab = bc_get_bytecode_label ();
- bc_emit_instruction (dup);
+ bc_emit_instruction (duplicate);
bc_emit_bytecode (opcode);
bc_emit_bytecode_labelref (lab);
if (arglist == 0
/* Arg could be wrong type if user redeclared this fcn wrong. */
|| TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
- return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
+ break;
/* Stabilize and compute the argument. */
if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
/* Return the address of the first anonymous stack arg. */
case BUILT_IN_NEXT_ARG:
{
+ tree parm;
tree fntype = TREE_TYPE (current_function_decl);
+ tree fnargs = DECL_ARGUMENTS (current_function_decl);
if (!(TYPE_ARG_TYPES (fntype) != 0
&& (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
- != void_type_node)))
+ != void_type_node))
+ && !(fnargs
+ && (parm = tree_last (fnargs)) != 0
+ && DECL_NAME (parm)
+ && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
+ "__builtin_va_alist"))))
{
error ("`va_start' used in function with fixed args");
return const0_rtx;
if (arglist == 0
/* Arg could be non-integer if user redeclared this fcn wrong. */
|| TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
- return const0_rtx;
+ break;
current_function_calls_alloca = 1;
/* Compute the argument. */
op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
if (arglist == 0
/* Arg could be non-integer if user redeclared this fcn wrong. */
|| TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
- return const0_rtx;
+ break;
/* Compute the argument. */
op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
if (arglist == 0
/* Arg could be non-pointer if user redeclared this fcn wrong. */
|| TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
- return const0_rtx;
+ break;
else
{
tree src = TREE_VALUE (arglist);
|| TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
|| TREE_CHAIN (arglist) == 0
|| TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
- return const0_rtx;
+ break;
else
{
tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
|| TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
|| TREE_CHAIN (TREE_CHAIN (arglist)) == 0
|| TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
- return const0_rtx;
+ break;
else
{
tree dest = TREE_VALUE (arglist);
|| TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
|| TREE_CHAIN (arglist) == 0
|| TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
- return const0_rtx;
+ break;
else if (!HAVE_cmpstrsi)
break;
{
|| TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
|| TREE_CHAIN (TREE_CHAIN (arglist)) == 0
|| TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
- return const0_rtx;
+ break;
else if (!HAVE_cmpstrsi)
break;
{
case EQ_EXPR:
if (integer_zerop (TREE_OPERAND (exp, 1)))
do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
- else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- &&
- !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ &&
+ !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
+ || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
do_jump_by_parts_equality (exp, if_false_label, if_true_label);
else
comparison = compare (exp, EQ, EQ);
case NE_EXPR:
if (integer_zerop (TREE_OPERAND (exp, 1)))
do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
- else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
- == MODE_INT)
- &&
- !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
+ == MODE_INT)
+ &&
+ !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+ || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
+ || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
do_jump_by_parts_equality (exp, if_true_label, if_false_label);
else
comparison = compare (exp, NE, NE);
else if (if_false_label)
{
rtx insn;
- rtx prev = PREV_INSN (get_last_insn ());
+ rtx prev = get_last_insn ();
rtx branch = 0;
+ if (prev != 0)
+ prev = PREV_INSN (prev);
+
/* Output the branch with the opposite condition. Then try to invert
what is generated. If more than one insn is a branch, or if the
branch is not the last insn written, abort. If we can't invert
if (bitnum != 0)
op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
- size_int (bitnum), target, ops_unsignedp);
+ size_int (bitnum), subtarget, ops_unsignedp);
if (GET_MODE (op0) != mode)
op0 = convert_to_mode (mode, op0, ops_unsignedp);
if ((code == EQ && ! invert) || (code == NE && invert))
- op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
+ op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ops_unsignedp, OPTAB_LIB_WIDEN);
/* Put the AND last so it can combine with more things. */
if (bitnum != TYPE_PRECISION (type) - 1)
- op0 = expand_and (op0, const1_rtx, target);
+ op0 = expand_and (op0, const1_rtx, subtarget);
return op0;
}
|| TYPE_MODE (type) == VOIDmode)
return;
else
- opcode = mode_to_load_map [TYPE_MODE (type)];
+ opcode = mode_to_load_map [(int) TYPE_MODE (type)];
if (opcode == neverneverland)
abort ();
opcode = storeBLK;
}
else
- opcode = mode_to_store_map [TYPE_MODE (type)];
+ opcode = mode_to_store_map [(int) TYPE_MODE (type)];
if (opcode == neverneverland)
abort ();
case POINTER_TYPE:
case RECORD_TYPE:
- val = TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
+ val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
break;
case ERROR_MARK:
if (list_length (CONSTRUCTOR_ELTS (constr))
!= list_length (TYPE_FIELDS (TREE_TYPE (constr))))
{
- bc_emit_instruction (dup);
+ bc_emit_instruction (duplicate);
bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
bc_emit_instruction (clearBLK);
}
if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
{
- bc_emit_instruction (dup);
+ bc_emit_instruction (duplicate);
bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
bc_emit_instruction (clearBLK);
}