/* Convert tree expression to rtl instructions, for GNU compiler.
- Copyright (C) 1988, 92, 93, 94, 95, 1996 Free Software Foundation, Inc.
+ Copyright (C) 1988, 92, 93, 94, 95, 96, 1997 Free Software Foundation, Inc.
This file is part of GNU CC.
and in other cases as well. */
int inhibit_defer_pop;
-/* A list of all cleanups which belong to the arguments of
- function calls being expanded by expand_call. */
-tree cleanups_this_call;
-
/* When temporaries are created by TARGET_EXPRs, they are created at
this level of temp_slot_level, so that they can remain allocated
until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
-static tree defer_cleanups_to PROTO((tree));
extern tree truthvalue_conversion PROTO((tree));
/* Record for each mode whether we can move a register directly to or
int mode;
for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
- mode_to_const_map[mode] =
- mode_to_load_map[mode] =
- mode_to_store_map[mode] = neverneverland;
+ mode_to_const_map[mode]
+ = mode_to_load_map[mode]
+ = mode_to_store_map[mode] = neverneverland;
#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
mode_to_const_map[(int) SYM] = CONST; \
pending_stack_adjust = 0;
inhibit_defer_pop = 0;
- cleanups_this_call = 0;
saveregs_value = 0;
apply_args_value = 0;
forced_labels = 0;
p->pending_stack_adjust = pending_stack_adjust;
p->inhibit_defer_pop = inhibit_defer_pop;
- p->cleanups_this_call = cleanups_this_call;
p->saveregs_value = saveregs_value;
p->apply_args_value = apply_args_value;
p->forced_labels = forced_labels;
pending_stack_adjust = 0;
inhibit_defer_pop = 0;
- cleanups_this_call = 0;
saveregs_value = 0;
apply_args_value = 0;
forced_labels = 0;
{
pending_stack_adjust = p->pending_stack_adjust;
inhibit_defer_pop = p->inhibit_defer_pop;
- cleanups_this_call = p->cleanups_this_call;
saveregs_value = p->saveregs_value;
apply_args_value = p->apply_args_value;
forced_labels = p->forced_labels;
if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
&& GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
&& GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
- return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
+ {
+ HOST_WIDE_INT val = INTVAL (x);
+
+ if (oldmode != VOIDmode
+ && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
+ {
+ int width = GET_MODE_BITSIZE (oldmode);
+
+ /* We need to zero extend VAL. */
+ val &= ((HOST_WIDE_INT) 1 << width) - 1;
+ }
+
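+ /* For example, an unsigned QImode value 0xff arrives here as the
+ sign-extended HOST_WIDE_INT -1; the mask above reduces it to 255
+ so the double-word constant is properly zero-extended. */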
+ return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
+ }
/* We can do this with a gen_lowpart if both desired and current modes
are integer, and this is either a constant integer, a register, or a
}
/* The code above should have handled everything. */
- if (data.len != 0)
+ if (data.len > 0)
abort ();
}
to1 = (data->autinc_to
? gen_rtx (MEM, mode, data->to_addr)
- : change_address (data->to, mode,
- plus_constant (data->to_addr, data->offset)));
+ : copy_rtx (change_address (data->to, mode,
+ plus_constant (data->to_addr,
+ data->offset))));
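+ /* Use copy_rtx because change_address can return DATA->to itself
+ when the address is unchanged, and we must not set
+ MEM_IN_STRUCT_P on a shared rtx. */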
MEM_IN_STRUCT_P (to1) = data->to_struct;
- from1 =
- (data->autinc_from
- ? gen_rtx (MEM, mode, data->from_addr)
- : change_address (data->from, mode,
- plus_constant (data->from_addr, data->offset)));
+
+ from1
+ = (data->autinc_from
+ ? gen_rtx (MEM, mode, data->from_addr)
+ : copy_rtx (change_address (data->from, mode,
+ plus_constant (data->from_addr,
+ data->offset))));
MEM_IN_STRUCT_P (from1) = data->from_struct;
#ifdef HAVE_PRE_DECREMENT
{
int i;
rtx pat, last;
+ enum machine_mode mode;
+ /* If SIZE is that of a mode no bigger than a word, just use that
+ mode's store operation. */
+ if (size <= UNITS_PER_WORD
+ && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
+ {
+ emit_move_insn (change_address (x, mode, NULL),
+ gen_rtx (REG, mode, regno));
+ return;
+ }
+
/* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
- to the left before storing to memory. */
+ to the left before storing to memory. Note that the previous test
+ doesn't handle all cases (e.g. SIZE == 3). */
if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
{
rtx tem = operand_subword (x, 0, 1, BLKmode);
to allow for AIX with 4 DF arguments after a single SI arg. The
last DF argument will only load 1 word into the integer registers,
but load a DF value into the float registers. */
+ else if ((GET_MODE_SIZE (GET_MODE (target_reg))
+ <= GET_MODE_SIZE (GET_MODE (y)))
+ && GET_MODE (target_reg) == word_mode)
+ /* This might be a const_double, so we can't just use SUBREG. */
+ source = operand_subword (y, 0, 0, VOIDmode);
else if (GET_MODE_SIZE (GET_MODE (target_reg))
- <= GET_MODE_SIZE (GET_MODE (y)))
- source = gen_rtx (SUBREG, GET_MODE (target_reg), y, 0);
+ == GET_MODE_SIZE (GET_MODE (y)))
+ source = gen_lowpart (GET_MODE (target_reg), y);
else
abort ();
}
plus_constant (XEXP (x, 0),
INTVAL (XEXP (element, 1))));
else if (XEXP (element, 1) == const0_rtx)
- target = x;
+ {
+ target = x;
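+ /* X may be a register in a different mode from this piece; use
+ its low part in SOURCE_REG's mode so the modes of the store
+ agree. */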
+ if (GET_MODE (target) != GET_MODE (source_reg))
+ target = gen_lowpart (GET_MODE (source_reg), target);
+ }
else
abort ();
to1 = (data->autinc_to
? gen_rtx (MEM, mode, data->to_addr)
- : change_address (data->to, mode,
- plus_constant (data->to_addr, data->offset)));
+ : copy_rtx (change_address (data->to, mode,
+ plus_constant (data->to_addr,
+ data->offset))));
MEM_IN_STRUCT_P (to1) = data->to_struct;
#ifdef HAVE_PRE_DECREMENT
Default is below for small data on big-endian machines; else above. */
enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
- /* If we're placing part of X into a register and part of X onto
- the stack, indicate that the entire register is clobbered to
- keep flow from thinking the unused part of the register is live. */
- if (partial > 0 && reg != 0)
- emit_insn (gen_rtx (CLOBBER, VOIDmode, reg));
-
/* Invert direction if stack is post-update. */
if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
if (where_pad != none)
int alignment;
push_temp_slots ();
- tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep);
+ tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
+ &unsignedp, &volatilep, &alignment);
/* If we are going to use store_bit_field and extract_bit_field,
make sure to_rtx will be safe for multiple use. */
if (mode1 == VOIDmode && want_value)
tem = stabilize_reference (tem);
- alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
if (offset != 0)
{
to_rtx = change_address (to_rtx, VOIDmode,
gen_rtx (PLUS, ptr_mode, XEXP (to_rtx, 0),
force_reg (ptr_mode, offset_rtx)));
- /* If we have a variable offset, the known alignment
- is only that of the innermost structure containing the field.
- (Actually, we could sometimes do better by using the
- align of an element of the innermost array, but no need.) */
- if (TREE_CODE (to) == COMPONENT_REF
- || TREE_CODE (to) == BIT_FIELD_REF)
- alignment
- = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
}
if (volatilep)
{
structure we are storing into, and hence may be shared.
We must make a new MEM before setting the volatile bit. */
if (offset == 0)
- to_rtx = change_address (to_rtx, VOIDmode, XEXP (to_rtx, 0));
+ to_rtx = copy_rtx (to_rtx);
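+ /* change_address with the same address would have returned
+ TO_RTX itself; copy_rtx really makes a fresh MEM. */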
+
MEM_VOLATILE_P (to_rtx) = 1;
}
#if 0 /* This was turned off because, when a field is volatile
For non-BLKmode, it is more efficient not to do this. */
rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
- rtx flag = NULL_RTX;
- tree left_cleanups = NULL_TREE;
- tree right_cleanups = NULL_TREE;
- tree old_cleanups = cleanups_this_call;
-
- /* Used to save a pointer to the place to put the setting of
- the flag that indicates if this side of the conditional was
- taken. We backpatch the code, if we find out later that we
- have any conditional cleanups that need to be performed. */
- rtx dest_right_flag = NULL_RTX;
- rtx dest_left_flag = NULL_RTX;
emit_queue ();
target = protect_from_queue (target, 1);
do_pending_stack_adjust ();
NO_DEFER_POP;
jumpifnot (TREE_OPERAND (exp, 0), lab1);
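+ /* Defer any cleanups made while expanding this arm so that they
+ are run only if this arm is actually executed. */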
+ start_cleanup_deferal ();
store_expr (TREE_OPERAND (exp, 1), target, 0);
- dest_left_flag = get_last_insn ();
- /* Handle conditional cleanups, if any. */
- left_cleanups = defer_cleanups_to (old_cleanups);
+ end_cleanup_deferal ();
emit_queue ();
emit_jump_insn (gen_jump (lab2));
emit_barrier ();
emit_label (lab1);
+ start_cleanup_deferal ();
store_expr (TREE_OPERAND (exp, 2), target, 0);
- dest_right_flag = get_last_insn ();
- /* Handle conditional cleanups, if any. */
- right_cleanups = defer_cleanups_to (old_cleanups);
+ end_cleanup_deferal ();
emit_queue ();
emit_label (lab2);
OK_DEFER_POP;
- /* Add back in any conditional cleanups. */
- if (left_cleanups || right_cleanups)
- {
- tree new_cleanups;
- tree cond;
- rtx last;
-
- /* Now that we know that a flag is needed, go back and add in the
- setting of the flag. */
-
- flag = gen_reg_rtx (word_mode);
-
- /* Do the left side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
-
- /* Do the right side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const0_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- if (! left_cleanups)
- left_cleanups = integer_zero_node;
- if (! right_cleanups)
- right_cleanups = integer_zero_node;
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- left_cleanups, right_cleanups);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
return want_value ? target : NULL_RTX;
}
else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
if (!(target && GET_CODE (target) == REG
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
&& !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
- && temp != target
+ && ! rtx_equal_p (temp, target)
&& (CONSTANT_P (temp) || want_value))
dont_return_target = 1;
}
/* If value was not generated in the target, store it there.
Convert the value to TARGET's type first if nec. */
- if (temp != target && TREE_CODE (exp) != ERROR_MARK)
+ if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
{
target = protect_from_queue (target, 1);
if (GET_MODE (temp) != GET_MODE (target)
if (TREE_READONLY (field))
{
if (GET_CODE (to_rtx) == MEM)
- to_rtx = change_address (to_rtx, GET_MODE (to_rtx),
- XEXP (to_rtx, 0));
+ to_rtx = copy_rtx (to_rtx);
+
RTX_UNCHANGING_P (to_rtx) = 1;
}
/* Now build a reference to just the desired component. */
- to_rtx = change_address (target, mode,
- plus_constant (addr, (bitpos / BITS_PER_UNIT)));
+ to_rtx = copy_rtx (change_address (target, mode,
+ plus_constant (addr,
+ (bitpos
+ / BITS_PER_UNIT))));
MEM_IN_STRUCT_P (to_rtx) = 1;
return store_expr (exp, to_rtx, value_mode != VOIDmode);
giving the variable offset (in units) in *POFFSET.
This offset is in addition to the bit position.
If the position is not variable, we store 0 in *POFFSET.
+ We set *PALIGNMENT to the alignment in bytes of the address that will be
+ computed. This is the alignment of the thing we return if *POFFSET
+ is zero, but can be less strictly aligned if *POFFSET is nonzero.
If any of the extraction expressions is volatile,
we store 1 in *PVOLATILEP. Otherwise we don't change that.
If the field describes a variable-sized object, *PMODE is set to
VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
- this case, but the address of the object can be found. */
+ this case, but the address of the object can be found. */
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
- punsignedp, pvolatilep)
+ punsignedp, pvolatilep, palignment)
tree exp;
int *pbitsize;
int *pbitpos;
enum machine_mode *pmode;
int *punsignedp;
int *pvolatilep;
+ int *palignment;
{
tree orig_exp = exp;
tree size_tree = 0;
enum machine_mode mode = VOIDmode;
tree offset = integer_zero_node;
+ int alignment = BIGGEST_ALIGNMENT;
if (TREE_CODE (exp) == COMPONENT_REF)
{
}
index = fold (build (MULT_EXPR, index_type, index,
- TYPE_SIZE (TREE_TYPE (exp))));
+ convert (index_type,
+ TYPE_SIZE (TREE_TYPE (exp)))));
if (TREE_CODE (index) == INTEGER_CST
&& TREE_INT_CST_HIGH (index) == 0)
/* If any reference in the chain is volatile, the effect is volatile. */
if (TREE_THIS_VOLATILE (exp))
*pvolatilep = 1;
+
+ /* If the offset is non-constant already, then we can't assume any
+ alignment more than the alignment here. */
+ if (! integer_zerop (offset))
+ alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
+
exp = TREE_OPERAND (exp, 0);
}
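+ /* The known alignment can be no better than that of the base object:
+ its DECL_ALIGN if it is a decl, else the alignment of its type. */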
+ if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
+ alignment = MIN (alignment, DECL_ALIGN (exp));
+ else if (TREE_TYPE (exp) != 0)
+ alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
+
if (integer_zerop (offset))
offset = 0;
*pmode = mode;
*poffset = offset;
+ *palignment = alignment / BITS_PER_UNIT;
return exp;
}
\f
further information, see tree.def. */
if (placeholder_list)
{
- tree object;
+ tree need_type = TYPE_MAIN_VARIANT (type);
+ tree object = 0;
tree old_list = placeholder_list;
+ tree elt;
- for (object = TREE_PURPOSE (placeholder_list);
- (TYPE_MAIN_VARIANT (TREE_TYPE (object))
- != TYPE_MAIN_VARIANT (type))
- && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
- || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
- || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
- || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
- object = TREE_OPERAND (object, 0))
- ;
-
- if (object != 0
- && (TYPE_MAIN_VARIANT (TREE_TYPE (object))
- == TYPE_MAIN_VARIANT (type)))
+ /* See if the object is the type that we want. Then see if
+ the operand of any reference is the type we want. */
+ if ((TYPE_MAIN_VARIANT (TREE_TYPE (TREE_PURPOSE (placeholder_list)))
+ == need_type))
+ object = TREE_PURPOSE (placeholder_list);
+
+ /* Find the innermost reference that is of the type we want. */
+ for (elt = TREE_PURPOSE (placeholder_list);
+ elt != 0
+ && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
+ || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
+ || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
+ || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
+ elt = ((TREE_CODE (elt) == COMPOUND_EXPR
+ || TREE_CODE (elt) == COND_EXPR)
+ ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
+ if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
+ && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
+ == need_type))
+ object = TREE_OPERAND (elt, 0);
+
+ if (object != 0)
{
/* Expand this object skipping the list entries before
it was found in case it is also a PLACEHOLDER_EXPR.
int vars_need_expansion = 0;
/* Need to open a binding contour here because
- if there are any cleanups they most be contained here. */
+ if there are any cleanups they must be contained here. */
expand_start_bindings (0);
/* Mark the corresponding BLOCK for output in its proper place. */
}
case RTL_EXPR:
- if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
- abort ();
- emit_insns (RTL_EXPR_SEQUENCE (exp));
- RTL_EXPR_SEQUENCE (exp) = const0_rtx;
+ if (RTL_EXPR_SEQUENCE (exp))
+ {
+ if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
+ abort ();
+ emit_insns (RTL_EXPR_SEQUENCE (exp));
+ RTL_EXPR_SEQUENCE (exp) = const0_rtx;
+ }
preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
free_temps_for_rtl_expr (exp);
return RTL_EXPR_RTL (exp);
else
{
- if (target == 0 || ! safe_from_p (target, exp))
+ /* Handle calls that pass values in multiple non-contiguous
+ locations. The Irix 6 ABI has examples of this. */
+ if (target == 0 || ! safe_from_p (target, exp)
+ || GET_CODE (target) == PARALLEL)
{
if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
if (TREE_READONLY (exp))
{
if (GET_CODE (target) == MEM)
- target = change_address (target, GET_MODE (target),
- XEXP (target, 0));
+ target = copy_rtx (target);
+
RTX_UNCHANGING_P (target) = 1;
}
for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
elt = TREE_CHAIN (elt))
- if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
- return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
+ if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
+ /* We can normally use the value of the field in the
+ CONSTRUCTOR. However, if this is a bitfield in
+ an integral mode that we can fit in a HOST_WIDE_INT,
+ we must mask only the number of bits in the bitfield,
+ since this is done implicitly by the constructor. If
+ the bitfield does not meet either of those conditions,
+ we can't do this optimization. */
+ && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
+ || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
+ == MODE_INT)
+ && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
+ <= HOST_BITS_PER_WIDE_INT))))
+ {
+ op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
+ if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
+ {
+ int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
+ enum machine_mode imode
+ = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
+
+ if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
+ {
+ op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
+ op0 = expand_and (op0, op1, target);
+ }
+ else
+ {
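+ /* Sign-extend the field: shift it to the top of IMODE,
+ then arithmetic-shift it back down. */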
+ tree count
+ = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize, 0);
+
+ op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
+ target, 0);
+ op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
+ target, 0);
+ }
+ }
+
+ return op0;
+ }
}
{
int bitpos;
tree offset;
int volatilep = 0;
- tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode1, &unsignedp, &volatilep);
int alignment;
+ tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
+ &mode1, &unsignedp, &volatilep,
+ &alignment);
/* If we got back the original object, something is wrong. Perhaps
we are evaluating an expression too early. In any event, don't
op0 = validize_mem (force_const_mem (mode, op0));
}
- alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
if (offset != 0)
{
rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
op0 = change_address (op0, VOIDmode,
gen_rtx (PLUS, ptr_mode, XEXP (op0, 0),
force_reg (ptr_mode, offset_rtx)));
- /* If we have a variable offset, the known alignment
- is only that of the innermost structure containing the field.
- (Actually, we could sometimes do better by using the
- size of an element of the innermost array, but no need.) */
- if (TREE_CODE (exp) == COMPONENT_REF
- || TREE_CODE (exp) == BIT_FIELD_REF)
- alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
- / BITS_PER_UNIT);
}
/* Don't forget about volatility even if this is a bitfield. */
MEM_IN_STRUCT_P (op0) = 1;
MEM_VOLATILE_P (op0) |= volatilep;
- if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
+ if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
+ || modifier == EXPAND_CONST_ADDRESS
+ || modifier == EXPAND_INITIALIZER)
return op0;
- if (target == 0)
+ else if (target == 0)
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
+
convert_move (target, op0, unsignedp);
return target;
}
{
RTL_EXPR_RTL (exp)
= expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
- cleanups_this_call
- = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
+ expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
+
/* That's it for this cleanup. */
TREE_OPERAND (exp, 2) = 0;
- expand_eh_region_start ();
}
return RTL_EXPR_RTL (exp);
case CLEANUP_POINT_EXPR:
{
extern int temp_slot_level;
- tree old_cleanups = cleanups_this_call;
- int old_temp_level = target_temp_slot_level;
- push_temp_slots ();
+ /* Start a new binding layer that will keep track of all cleanup
+ actions to be performed. */
+ expand_start_bindings (0);
+
target_temp_slot_level = temp_slot_level;
+
op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
/* If we're going to use this value, load it up now. */
if (! ignore)
op0 = force_not_mem (op0);
- expand_cleanups_to (old_cleanups);
preserve_temp_slots (op0);
- free_temp_slots ();
- pop_temp_slots ();
- target_temp_slot_level = old_temp_level;
+ expand_end_bindings (NULL_TREE, 0, 0);
}
return op0;
VOIDmode, 0);
case COND_EXPR:
- {
- rtx flag = NULL_RTX;
- tree left_cleanups = NULL_TREE;
- tree right_cleanups = NULL_TREE;
-
- /* Used to save a pointer to the place to put the setting of
- the flag that indicates if this side of the conditional was
- taken. We backpatch the code, if we find out later that we
- have any conditional cleanups that need to be performed. */
- rtx dest_right_flag = NULL_RTX;
- rtx dest_left_flag = NULL_RTX;
+ /* If we would have a "singleton" (see below) were it not for a
+ conversion in each arm, bring that conversion back out. */
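+ /* For example, (flag ? (long) (i + 1) : (long) i) is rewritten as
+ (long) (flag ? i + 1 : i), which the singleton code below can
+ handle. */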
+ if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
+ && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
+ && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
+ == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
+ {
+ tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
+ tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
+
+ if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
+ && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
+ || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
+ && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
+ || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
+ && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
+ || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
+ && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
+ return expand_expr (build1 (NOP_EXPR, type,
+ build (COND_EXPR, TREE_TYPE (true),
+ TREE_OPERAND (exp, 0),
+ true, false)),
+ target, tmode, modifier);
+ }
+ {
/* Note that COND_EXPRs whose type is a structure or union
are required to be constructed to contain assignments of
a temporary variable, so that we can evaluate them here
tree singleton = 0;
tree binary_op = 0, unary_op = 0;
- tree old_cleanups = cleanups_this_call;
/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
convert it to our mode, if necessary. */
return target;
}
- /* Check for X ? A + B : A. If we have this, we can copy
- A to the output and conditionally add B. Similarly for unary
- operations. Don't do this if X has side-effects because
- those side effects might affect A or B and the "?" operation is
- a sequence point in ANSI. (We test for side effects later.) */
+ /* Check for X ? A + B : A. If we have this, we can copy A to the
+ output and conditionally add B. Similarly for unary operations.
+ Don't do this if X has side-effects because those side effects
+ might affect A or B and the "?" operation is a sequence point in
+ ANSI. (operand_equal_p tests for side effects.) */
if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
&& operand_equal_p (TREE_OPERAND (exp, 2),
else
temp = assign_temp (type, 0, 0, 1);
- /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
- operation, do this as A + (X != 0). Similarly for other simple
- binary operators. */
+ /* If we had X ? A + C : A, with C a constant power of 2, and we can
+ do the test of X as a store-flag operation, do this as
+ A + ((X != 0) << log C). Similarly for other simple binary
+ operators. Only do for C == 1 if BRANCH_COST is low. */
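+ /* E.g. with C == 4, X ? A + 4 : A becomes A + ((X != 0) << 2). */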
if (temp && singleton && binary_op
- && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
&& (TREE_CODE (binary_op) == PLUS_EXPR
|| TREE_CODE (binary_op) == MINUS_EXPR
|| TREE_CODE (binary_op) == BIT_IOR_EXPR
|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
- && integer_onep (TREE_OPERAND (binary_op, 1))
+ && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
+ : integer_onep (TREE_OPERAND (binary_op, 1)))
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
{
rtx result;
? temp : NULL_RTX),
mode, BRANCH_COST <= 1);
+ if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
+ result = expand_shift (LSHIFT_EXPR, mode, result,
+ build_int_2 (tree_log2
+ (TREE_OPERAND
+ (binary_op, 1)),
+ 0),
+ (safe_from_p (temp, singleton)
+ ? temp : NULL_RTX), 0);
+
if (result)
{
op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
NO_DEFER_POP;
op0 = gen_label_rtx ();
- flag = gen_reg_rtx (word_mode);
if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
{
if (temp != 0)
else
expand_expr (singleton,
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_left_flag = get_last_insn ();
if (singleton == TREE_OPERAND (exp, 1))
jumpif (TREE_OPERAND (exp, 0), op0);
else
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferal ();
if (binary_op && temp == 0)
/* Just touch the other operand. */
expand_expr (TREE_OPERAND (binary_op, 1),
make_tree (type, temp)),
temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
- }
-#if 0
- /* This is now done in jump.c and is better done there because it
- produces shorter register lifetimes. */
-
- /* Check for both possibilities either constants or variables
- in registers (but not the same as the target!). If so, can
- save branches by assigning one, branching, and assigning the
- other. */
- else if (temp && GET_MODE (temp) != BLKmode
- && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
- || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
- || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
- && DECL_RTL (TREE_OPERAND (exp, 1))
- && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
- && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
- && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
- || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
- || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
- && DECL_RTL (TREE_OPERAND (exp, 2))
- && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
- && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
- {
- if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
- temp = gen_reg_rtx (mode);
- store_expr (TREE_OPERAND (exp, 2), temp, 0);
- dest_left_flag = get_last_insn ();
- jumpifnot (TREE_OPERAND (exp, 0), op0);
-
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
- store_expr (TREE_OPERAND (exp, 1), temp, 0);
- op1 = op0;
- dest_right_flag = get_last_insn ();
}
-#endif
/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
comparison operator. If we have one of these cases, set the
output to A, branch on A (cse will merge these two references),
if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 1), temp, 0);
- dest_left_flag = get_last_insn ();
jumpif (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferal ();
store_expr (TREE_OPERAND (exp, 2), temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
}
else if (temp
&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 2), temp, 0);
- dest_left_flag = get_last_insn ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferal ();
store_expr (TREE_OPERAND (exp, 1), temp, 0);
op1 = op0;
- dest_right_flag = get_last_insn ();
}
else
{
op1 = gen_label_rtx ();
jumpifnot (TREE_OPERAND (exp, 0), op0);
- /* Allows cleanups up to here. */
- old_cleanups = cleanups_this_call;
+ start_cleanup_deferal ();
if (temp != 0)
store_expr (TREE_OPERAND (exp, 1), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 1),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_left_flag = get_last_insn ();
-
- /* Handle conditional cleanups, if any. */
- left_cleanups = defer_cleanups_to (old_cleanups);
-
+ end_cleanup_deferal ();
emit_queue ();
emit_jump_insn (gen_jump (op1));
emit_barrier ();
emit_label (op0);
+ start_cleanup_deferal ();
if (temp != 0)
store_expr (TREE_OPERAND (exp, 2), temp, 0);
else
expand_expr (TREE_OPERAND (exp, 2),
ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
- dest_right_flag = get_last_insn ();
}
- /* Handle conditional cleanups, if any. */
- right_cleanups = defer_cleanups_to (old_cleanups);
+ end_cleanup_deferal ();
emit_queue ();
emit_label (op1);
OK_DEFER_POP;
- /* Add back in, any conditional cleanups. */
- if (left_cleanups || right_cleanups)
- {
- tree new_cleanups;
- tree cond;
- rtx last;
-
- /* Now that we know that a flag is needed, go back and add in the
- setting of the flag. */
-
- /* Do the left side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_left_flag);
-
- /* Do the right side flag. */
- last = get_last_insn ();
- /* Flag left cleanups as needed. */
- emit_move_insn (flag, const0_rtx);
- /* ??? deprecated, use sequences instead. */
- reorder_insns (NEXT_INSN (last), get_last_insn (), dest_right_flag);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- if (! left_cleanups)
- left_cleanups = integer_zero_node;
- if (! right_cleanups)
- right_cleanups = integer_zero_node;
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- left_cleanups, right_cleanups);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
return temp;
}
store_expr (exp1, target, 0);
- if (cleanups)
- {
- cleanups_this_call = tree_cons (NULL_TREE,
- cleanups,
- cleanups_this_call);
- expand_eh_region_start ();
- }
+ expand_decl_cleanup (NULL_TREE, cleanups);
return target;
}
return target;
}
+ case TRY_CATCH_EXPR:
+ {
+ tree handler = TREE_OPERAND (exp, 1);
+
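+ /* Expand operand 0 inside an EH region; HANDLER is the code to
+ run if the body raises an exception. */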
+ expand_eh_region_start ();
+
+ op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
+
+ expand_eh_region_end (handler);
+
+ return op0;
+ }
+
+ case POPDCC_EXPR:
+ {
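+ /* Pop one entry: the first word of the record at the head of the
+ chain is the link to the next record. */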
+ rtx dcc = get_dynamic_cleanup_chain ();
+ emit_move_insn (dcc, validize_mem (gen_rtx (MEM, Pmode, dcc)));
+ return const0_rtx;
+ }
+
+ case POPDHC_EXPR:
+ {
+ rtx dhc = get_dynamic_handler_chain ();
+ emit_move_insn (dhc, validize_mem (gen_rtx (MEM, Pmode, dhc)));
+ return const0_rtx;
+ }
+
case ERROR_MARK:
op0 = CONST0_RTX (tmode);
if (op0 != 0)
#endif
return tem;
}
+
+/* __builtin_setjmp is passed a pointer to an array of five words (not
+ all will be used on all machines). It operates similarly to the C
+ library function of the same name, but is more efficient. Much of
+ the code below (and for longjmp) is copied from the handling of
+ non-local gotos.
+
+ NOTE: This is intended for use by GNAT and the exception handling
+ scheme in the compiler and will only work in the manner used by
+ them. */
+
+rtx
+expand_builtin_setjmp (buf_addr, target)
+ rtx buf_addr;
+ rtx target;
+{
+ rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
+ enum machine_mode sa_mode = Pmode, value_mode;
+ rtx stack_save;
+ int old_inhibit_defer_pop = inhibit_defer_pop;
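+ /* RETURN_POPS_ARGS expects a function type as its second argument,
+ so fabricate an argument-less void function type for the dummy
+ call. */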
+ int return_pops
+ = RETURN_POPS_ARGS (get_identifier ("__dummy"),
+ build_function_type (void_type_node, NULL_TREE),
+ 0);
+ rtx next_arg_reg;
+ CUMULATIVE_ARGS args_so_far;
+ rtx op0;
+ int i;
+
+ value_mode = TYPE_MODE (integer_type_node);
+
+#ifdef POINTERS_EXTEND_UNSIGNED
+ buf_addr = convert_memory_address (Pmode, buf_addr);
+#endif
+
+ buf_addr = force_reg (Pmode, buf_addr);
+
+ if (target == 0 || GET_CODE (target) != REG
+ || REGNO (target) < FIRST_PSEUDO_REGISTER)
+ target = gen_reg_rtx (value_mode);
+
+ emit_queue ();
+
+ CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
+ current_function_calls_setjmp = 1;
+
+ /* We store the frame pointer and the address of lab1 in the buffer
+ and use the rest of it for the stack save area, which is
+ machine-dependent. */
+ emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
+ virtual_stack_vars_rtx);
+ emit_move_insn
+ (validize_mem (gen_rtx (MEM, Pmode,
+ plus_constant (buf_addr,
+ GET_MODE_SIZE (Pmode)))),
+ gen_rtx (LABEL_REF, Pmode, lab1));
+
+#ifdef HAVE_save_stack_nonlocal
+ if (HAVE_save_stack_nonlocal)
+ sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
+#endif
+
+ stack_save = gen_rtx (MEM, sa_mode,
+ plus_constant (buf_addr,
+ 2 * GET_MODE_SIZE (Pmode)));
+ emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
+
+#ifdef HAVE_setjmp
+ if (HAVE_setjmp)
+ emit_insn (gen_setjmp ());
+#endif
+
+ /* Set TARGET to zero and branch around the other case. */
+ emit_move_insn (target, const0_rtx);
+ emit_jump_insn (gen_jump (lab2));
+ emit_barrier ();
+ emit_label (lab1);
+
+ /* Note that setjmp clobbers FP when we get here, so we have to make
+ sure it's marked as used by this function. */
+ emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
+
+ /* Mark the static chain as clobbered here so life information
+ doesn't get messed up for it. */
+ emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
+
+ /* Now put in the code to restore the frame pointer, and argument
+ pointer, if needed. The code below is from expand_end_bindings
+ in stmt.c; see detailed documentation there. */
+#ifdef HAVE_nonlocal_goto
+ if (! HAVE_nonlocal_goto)
+#endif
+ emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
+
+ /* Do we need to do something like:
+
+ current_function_has_nonlocal_label = 1;
+
+ here? It seems like we might have to, or some subset of that
+ functionality, but I am unsure. (mrs) */
+
+#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
+ if (fixed_regs[ARG_POINTER_REGNUM])
+ {
+#ifdef ELIMINABLE_REGS
+ static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
+
+ for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
+ if (elim_regs[i].from == ARG_POINTER_REGNUM
+ && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
+ break;
+
+ if (i == sizeof elim_regs / sizeof elim_regs [0])
+#endif
+ {
+ /* Now restore our arg pointer from the address at which it
+ was saved in our stack frame.
+ If space hasn't been allocated for it yet, make
+ some now. */
+ if (arg_pointer_save_area == 0)
+ arg_pointer_save_area
+ = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
+ emit_move_insn (virtual_incoming_args_rtx,
+ copy_to_reg (arg_pointer_save_area));
+ }
+ }
+#endif
+
+#ifdef HAVE_nonlocal_goto_receiver
+ if (HAVE_nonlocal_goto_receiver)
+ emit_insn (gen_nonlocal_goto_receiver ());
+#endif
+ /* The static chain pointer contains the address of the dummy function.
+ We need to call it here to handle some PIC cases of restoring a
+ global pointer. Then return 1. */
+ op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
+
+ /* We can't actually call emit_library_call here, so do everything
+ it does, which isn't much for a libfunc with no args. */
+ op0 = memory_address (FUNCTION_MODE, op0);
+
+ INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
+ gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
+ next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
+
+#ifndef ACCUMULATE_OUTGOING_ARGS
+#ifdef HAVE_call_pop
+ if (HAVE_call_pop)
+ emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
+ const0_rtx, next_arg_reg,
+ GEN_INT (return_pops)));
+ else
+#endif
+#endif
+
+#ifdef HAVE_call
+ if (HAVE_call)
+ emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
+ const0_rtx, next_arg_reg, const0_rtx));
+ else
+#endif
+ abort ();
+
+ emit_move_insn (target, const1_rtx);
+ emit_label (lab2);
+ return target;
+}
+
\f
/* Expand an expression EXP that calls a built-in function,
with result going to TARGET if that's convenient
if (arglist == 0)
/* Warning about missing arg was already issued. */
return const0_rtx;
- else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
- {
- error ("invalid arg to `__builtin_return_address'");
- return const0_rtx;
- }
- else if (tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
+ else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
+ || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
{
- error ("invalid arg to `__builtin_return_address'");
+ if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
+ error ("invalid arg to `__builtin_frame_address'");
+ else
+ error ("invalid arg to `__builtin_return_address'");
return const0_rtx;
}
else
break;
#endif
- /* __builtin_setjmp is passed a pointer to an array of five words
- (not all will be used on all machines). It operates similarly to
- the C library function of the same name, but is more efficient.
- Much of the code below (and for longjmp) is copied from the handling
- of non-local gotos.
-
- NOTE: This is intended for use by GNAT and will only work in
- the method used by it. This code will likely NOT survive to
- the GCC 2.8.0 release. */
case BUILT_IN_SETJMP:
if (arglist == 0
|| TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
{
rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
VOIDmode, 0);
- rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
- enum machine_mode sa_mode = Pmode;
- rtx stack_save;
- int old_inhibit_defer_pop = inhibit_defer_pop;
- int return_pops = RETURN_POPS_ARGS (get_identifier ("__dummy"),
- get_identifier ("__dummy"), 0);
- rtx next_arg_reg;
- CUMULATIVE_ARGS args_so_far;
- int i;
-
-#ifdef POINTERS_EXTEND_UNSIGNED
- buf_addr = convert_memory_address (Pmode, buf_addr);
-#endif
-
- buf_addr = force_reg (Pmode, buf_addr);
-
- if (target == 0 || GET_CODE (target) != REG
- || REGNO (target) < FIRST_PSEUDO_REGISTER)
- target = gen_reg_rtx (value_mode);
-
- emit_queue ();
-
- CONST_CALL_P (emit_note (NULL_PTR, NOTE_INSN_SETJMP)) = 1;
- current_function_calls_setjmp = 1;
-
- /* We store the frame pointer and the address of lab1 in the buffer
- and use the rest of it for the stack save area, which is
- machine-dependent. */
- emit_move_insn (gen_rtx (MEM, Pmode, buf_addr),
- virtual_stack_vars_rtx);
- emit_move_insn
- (validize_mem (gen_rtx (MEM, Pmode,
- plus_constant (buf_addr,
- GET_MODE_SIZE (Pmode)))),
- gen_rtx (LABEL_REF, Pmode, lab1));
-
-#ifdef HAVE_save_stack_nonlocal
- if (HAVE_save_stack_nonlocal)
- sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
-#endif
-
- stack_save = gen_rtx (MEM, sa_mode,
- plus_constant (buf_addr,
- 2 * GET_MODE_SIZE (Pmode)));
- emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
-
-#ifdef HAVE_setjmp
- if (HAVE_setjmp)
- emit_insn (gen_setjmp ());
-#endif
-
- /* Set TARGET to zero and branch around the other case. */
- emit_move_insn (target, const0_rtx);
- emit_jump_insn (gen_jump (lab2));
- emit_barrier ();
- emit_label (lab1);
-
- /* Note that setjmp clobbers FP when we get here, so we have to
- make sure it's marked as used by this function. */
- emit_insn (gen_rtx (USE, VOIDmode, hard_frame_pointer_rtx));
-
- /* Mark the static chain as clobbered here so life information
- doesn't get messed up for it. */
- emit_insn (gen_rtx (CLOBBER, VOIDmode, static_chain_rtx));
-
- /* Now put in the code to restore the frame pointer, and argument
- pointer, if needed. The code below is from expand_end_bindings
- in stmt.c; see detailed documentation there. */
-#ifdef HAVE_nonlocal_goto
- if (! HAVE_nonlocal_goto)
-#endif
- emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
-
- current_function_has_nonlocal_goto = 1;
-
-#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
- if (fixed_regs[ARG_POINTER_REGNUM])
- {
-#ifdef ELIMINABLE_REGS
- static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
-
- for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
- if (elim_regs[i].from == ARG_POINTER_REGNUM
- && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
- break;
-
- if (i == sizeof elim_regs / sizeof elim_regs [0])
-#endif
- {
- /* Now restore our arg pointer from the address at which it
- was saved in our stack frame.
- If there hasn't be space allocated for it yet, make
- some now. */
- if (arg_pointer_save_area == 0)
- arg_pointer_save_area
- = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
- emit_move_insn (virtual_incoming_args_rtx,
- copy_to_reg (arg_pointer_save_area));
- }
- }
-#endif
-
-#ifdef HAVE_nonlocal_goto_receiver
- if (HAVE_nonlocal_goto_receiver)
- emit_insn (gen_nonlocal_goto_receiver ());
-#endif
- /* The static chain pointer contains the address of dummy function.
- We need to call it here to handle some PIC cases of restoring
- a global pointer. Then return 1. */
- op0 = copy_to_mode_reg (Pmode, static_chain_rtx);
-
- /* We can't actually call emit_library_call here, so do everything
- it does, which isn't much for a libfunc with no args. */
- op0 = memory_address (FUNCTION_MODE, op0);
-
- INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
- gen_rtx (SYMBOL_REF, Pmode, "__dummy"), 1);
- next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);
-
-#ifndef ACCUMULATE_OUTGOING_ARGS
-#ifdef HAVE_call_pop
- if (HAVE_call_pop)
- emit_call_insn (gen_call_pop (gen_rtx (MEM, FUNCTION_MODE, op0),
- const0_rtx, next_arg_reg,
- GEN_INT (return_pops)));
- else
-#endif
-#endif
-
-#ifdef HAVE_call
- if (HAVE_call)
- emit_call_insn (gen_call (gen_rtx (MEM, FUNCTION_MODE, op0),
- const0_rtx, next_arg_reg, const0_rtx));
- else
-#endif
- abort ();
-
- emit_move_insn (target, const1_rtx);
- emit_label (lab2);
- return target;
+ return expand_builtin_setjmp (buf_addr, target);
}
/* __builtin_longjmp is passed a pointer to an array of five words
pending_stack_adjust = 0;
}
}
-
-/* Defer the expansion all cleanups up to OLD_CLEANUPS.
- Returns the cleanups to be performed. */
-
-static tree
-defer_cleanups_to (old_cleanups)
- tree old_cleanups;
-{
- tree new_cleanups = NULL_TREE;
- tree cleanups = cleanups_this_call;
- tree last = NULL_TREE;
-
- while (cleanups_this_call != old_cleanups)
- {
- expand_eh_region_end (TREE_VALUE (cleanups_this_call));
- last = cleanups_this_call;
- cleanups_this_call = TREE_CHAIN (cleanups_this_call);
- }
-
- if (last)
- {
- /* Remove the list from the chain of cleanups. */
- TREE_CHAIN (last) = NULL_TREE;
-
- /* reverse them so that we can build them in the right order. */
- cleanups = nreverse (cleanups);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- while (cleanups)
- {
- if (new_cleanups)
- new_cleanups = build (COMPOUND_EXPR, TREE_TYPE (new_cleanups),
- TREE_VALUE (cleanups), new_cleanups);
- else
- new_cleanups = TREE_VALUE (cleanups);
-
- cleanups = TREE_CHAIN (cleanups);
- }
-
- pop_obstacks ();
- }
-
- return new_cleanups;
-}
-
-/* Expand all cleanups up to OLD_CLEANUPS.
- Needed here, and also for language-dependent calls. */
-
-void
-expand_cleanups_to (old_cleanups)
- tree old_cleanups;
-{
- while (cleanups_this_call != old_cleanups)
- {
- expand_eh_region_end (TREE_VALUE (cleanups_this_call));
- expand_expr (TREE_VALUE (cleanups_this_call), const0_rtx, VOIDmode, 0);
- cleanups_this_call = TREE_CHAIN (cleanups_this_call);
- }
-}
\f
/* Expand conditional expressions. */
break;
case TRUTH_ANDIF_EXPR:
- {
- rtx seq1, seq2;
- tree cleanups, old_cleanups;
-
- if (if_false_label == 0)
- if_false_label = drop_through_label = gen_label_rtx ();
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
- seq1 = get_insns ();
- end_sequence ();
-
- old_cleanups = cleanups_this_call;
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- seq2 = get_insns ();
- cleanups = defer_cleanups_to (old_cleanups);
- end_sequence ();
-
- if (cleanups)
- {
- rtx flag = gen_reg_rtx (word_mode);
- tree new_cleanups;
- tree cond;
-
- /* Flag cleanups as not needed. */
- emit_move_insn (flag, const0_rtx);
- emit_insns (seq1);
-
- /* Flag cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- emit_insns (seq2);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- cleanups, integer_zero_node);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
- else
- {
- emit_insns (seq1);
- emit_insns (seq2);
- }
- }
+ if (if_false_label == 0)
+ if_false_label = drop_through_label = gen_label_rtx ();
+ do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
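+ /* Operand 1 is evaluated only when operand 0 is true, so any
+ cleanups it makes must be deferred. */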
+ start_cleanup_deferal ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
+ end_cleanup_deferal ();
break;
case TRUTH_ORIF_EXPR:
- {
- rtx seq1, seq2;
- tree cleanups, old_cleanups;
-
- if (if_true_label == 0)
- if_true_label = drop_through_label = gen_label_rtx ();
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
- seq1 = get_insns ();
- end_sequence ();
-
- old_cleanups = cleanups_this_call;
- start_sequence ();
- do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
- seq2 = get_insns ();
- cleanups = defer_cleanups_to (old_cleanups);
- end_sequence ();
-
- if (cleanups)
- {
- rtx flag = gen_reg_rtx (word_mode);
- tree new_cleanups;
- tree cond;
-
- /* Flag cleanups as not needed. */
- emit_move_insn (flag, const0_rtx);
- emit_insns (seq1);
-
- /* Flag cleanups as needed. */
- emit_move_insn (flag, const1_rtx);
- emit_insns (seq2);
-
- /* All cleanups must be on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* convert flag, which is an rtx, into a tree. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- cleanups, integer_zero_node);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
- else
- {
- emit_insns (seq1);
- emit_insns (seq2);
- }
- }
+ if (if_true_label == 0)
+ if_true_label = drop_through_label = gen_label_rtx ();
+ do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
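+ /* Likewise, operand 1 is evaluated only when operand 0 is false. */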
+ start_cleanup_deferal ();
+ do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
+ end_cleanup_deferal ();
break;
case COMPOUND_EXPR:
tree type;
tree offset;
int volatilep = 0;
+ int alignment;
/* Get description of this reference. We don't actually care
about the underlying object here. */
get_inner_reference (exp, &bitsize, &bitpos, &offset,
- &mode, &unsignedp, &volatilep);
+ &mode, &unsignedp, &volatilep,
+ &alignment);
type = type_for_size (bitsize, unsignedp);
if (! SLOW_BYTE_ACCESS
else
{
- rtx seq1, seq2;
- tree cleanups_left_side, cleanups_right_side, old_cleanups;
-
register rtx label1 = gen_label_rtx ();
drop_through_label = gen_label_rtx ();
do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
- /* We need to save the cleanups for the lhs and rhs separately.
- Keep track of the cleanups seen before the lhs. */
- old_cleanups = cleanups_this_call;
- start_sequence ();
+ start_cleanup_deferal ();
/* Now the THEN-expression. */
do_jump (TREE_OPERAND (exp, 1),
if_false_label ? if_false_label : drop_through_label,
/* In case the do_jump just above never jumps. */
do_pending_stack_adjust ();
emit_label (label1);
- seq1 = get_insns ();
- /* Now grab the cleanups for the lhs. */
- cleanups_left_side = defer_cleanups_to (old_cleanups);
- end_sequence ();
- /* And keep track of where we start before the rhs. */
- old_cleanups = cleanups_this_call;
- start_sequence ();
/* Now the ELSE-expression. */
do_jump (TREE_OPERAND (exp, 2),
if_false_label ? if_false_label : drop_through_label,
if_true_label ? if_true_label : drop_through_label);
- seq2 = get_insns ();
- /* Grab the cleanups for the rhs. */
- cleanups_right_side = defer_cleanups_to (old_cleanups);
- end_sequence ();
-
- if (cleanups_left_side || cleanups_right_side)
- {
- /* Make the cleanups for the THEN and ELSE clauses
- conditional based on which half is executed. */
- rtx flag = gen_reg_rtx (word_mode);
- tree new_cleanups;
- tree cond;
-
- /* Set the flag to 0 so that we know we executed the lhs. */
- emit_move_insn (flag, const0_rtx);
- emit_insns (seq1);
-
- /* Set the flag to 1 so that we know we executed the rhs. */
- emit_move_insn (flag, const1_rtx);
- emit_insns (seq2);
-
- /* Make sure the cleanup lives on the function_obstack. */
- push_obstacks_nochange ();
- resume_temporary_allocation ();
-
- /* Now, build up a COND_EXPR that tests the value of the
- flag, and then either do the cleanups for the lhs or the
- rhs. */
- cond = make_node (RTL_EXPR);
- TREE_TYPE (cond) = integer_type_node;
- RTL_EXPR_RTL (cond) = flag;
- RTL_EXPR_SEQUENCE (cond) = NULL_RTX;
- cond = save_expr (cond);
-
- new_cleanups = build (COND_EXPR, void_type_node,
- truthvalue_conversion (cond),
- cleanups_right_side, cleanups_left_side);
- new_cleanups = fold (new_cleanups);
-
- pop_obstacks ();
-
- /* Now add in the conditionalized cleanups. */
- cleanups_this_call
- = tree_cons (NULL_TREE, new_cleanups, cleanups_this_call);
- expand_eh_region_start ();
- }
- else
- {
- /* No cleanups were needed, so emit the two sequences
- directly. */
- emit_insns (seq1);
- emit_insns (seq2);
- }
+ end_cleanup_deferal ();
}
break;
else
abort ();
else
- /* See corresponding comment in bc_store_memory(). */
+ /* See corresponding comment in bc_store_memory. */
if (TYPE_MODE (type) == BLKmode
|| TYPE_MODE (type) == VOIDmode)
return;
/* Load: sign-extend if signed, else zero-extend */
bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
-}
+}
/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS