#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
+#include "tree-iterator.h"
+#include "tree-pass.h"
+#include "tree-flow.h"
#include "target.h"
+#include "timevar.h"
/* Decide whether a function's arguments should be processed
from first to last or from last to first.
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
-static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
HOST_WIDE_INT, enum machine_mode,
tree, tree, int, int);
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];
-/* Stack of EXPR_WITH_FILE_LOCATION nested expressions. */
-struct file_stack *expr_wfl_stack;
-
/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow. */
#ifndef SLOW_UNALIGNED_ACCESS
if (to_real != from_real)
abort ();
+ /* If the source and destination are already the same, then there's
+ nothing to do. */
+ if (to == from)
+ return;
+
/* If FROM is a SUBREG that indicates that we have already done at least
the required extension, strip it. We don't handle such SUBREGs as
TO here. */
&& ((code = can_extend_p (to_mode, word_mode, unsignedp))
!= CODE_FOR_nothing))
{
- if (GET_CODE (to) == REG)
+ if (REG_P (to))
{
if (reg_overlap_mentioned_p (to, from))
from = force_reg (from_mode, from);
&& ! MEM_VOLATILE_P (from)
&& direct_load[(int) to_mode]
&& ! mode_dependent_address_p (XEXP (from, 0)))
- || GET_CODE (from) == REG
+ || REG_P (from)
|| GET_CODE (from) == SUBREG))
from = force_reg (from_mode, from);
convert_move (to, gen_lowpart (word_mode, from), 0);
&& ! MEM_VOLATILE_P (from)
&& direct_load[(int) to_mode]
&& ! mode_dependent_address_p (XEXP (from, 0)))
- || GET_CODE (from) == REG
+ || REG_P (from)
|| GET_CODE (from) == SUBREG))
from = force_reg (from_mode, from);
- if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
+ if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
&& ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
from = copy_to_reg (from);
emit_move_insn (to, gen_lowpart (to_mode, from));
|| (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
&& ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
&& direct_load[(int) mode])
- || (GET_CODE (x) == REG
+ || (REG_P (x)
&& (! HARD_REGISTER_P (x)
|| HARD_REGNO_MODE_OK (REGNO (x), mode))
&& TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
y_addr = force_operand (XEXP (y, 0), NULL_RTX);
do_pending_stack_adjust ();
- emit_note (NOTE_INSN_LOOP_BEG);
-
emit_jump (cmp_label);
emit_label (top_label);
if (tmp != iter)
emit_move_insn (iter, tmp);
- emit_note (NOTE_INSN_LOOP_CONT);
emit_label (cmp_label);
emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
true, top_label);
-
- emit_note (NOTE_INSN_LOOP_END);
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
to be extracted. */
tmps[i] = XEXP (src, bytepos / slen0);
if (! CONSTANT_P (tmps[i])
- && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
+ && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
(bytepos % slen0) * BITS_PER_UNIT,
1, NULL_RTX, mode, mode, ssize);
SIMD register, which is currently broken. While we get GCC
to emit proper RTL for these cases, let's dump to memory. */
else if (VECTOR_MODE_P (GET_MODE (dst))
- && GET_CODE (src) == REG)
+ && REG_P (src))
{
int slen = GET_MODE_SIZE (GET_MODE (src));
rtx mem;
&& XVECLEN (dst, 0) > 1)
tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
else if (CONSTANT_P (src)
- || (GET_CODE (src) == REG && GET_MODE (src) == mode))
+ || (REG_P (src) && GET_MODE (src) == mode))
tmps[i] = src;
else
tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
void
use_reg (rtx *call_fusage, rtx reg)
{
- if (GET_CODE (reg) != REG
+ if (!REG_P (reg)
|| REGNO (reg) >= FIRST_PSEUDO_REGISTER)
abort ();
/* A NULL entry means the parameter goes both on the stack and in
registers. This can also be a MEM for targets that pass values
partially on the stack and partially in registers. */
- if (reg != 0 && GET_CODE (reg) == REG)
+ if (reg != 0 && REG_P (reg))
use_reg (call_fusage, reg);
}
}
if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
abort ();
- /* Never force constant_p_rtx to memory. */
- if (GET_CODE (y) == CONSTANT_P_RTX)
- ;
- else if (CONSTANT_P (y))
+ if (CONSTANT_P (y))
{
if (optimize
&& SCALAR_FLOAT_MODE_P (GET_MODE (x))
last_insn = emit_move_insn_1 (x, y);
- if (y_cst && GET_CODE (x) == REG
+ if (y_cst && REG_P (x)
&& (set = single_set (last_insn)) != NULL_RTX
&& SET_DEST (set) == x
&& ! rtx_equal_p (y_cst, SET_SRC (set)))
GET_MODE_SIZE (mode), 0);
rtx cmem = adjust_address (mem, mode, 0);
- cfun->cannot_inline
- = N_("function using short complex types cannot be inline");
-
if (packed_dest_p)
{
rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
emit_unop_insn (ic, x, trunc_y, UNKNOWN);
last_insn = get_last_insn ();
- if (GET_CODE (x) == REG)
+ if (REG_P (x))
set_unique_reg_note (last_insn, REG_EQUAL, y);
return last_insn;
size = convert_modes (Pmode, ptr_mode, size, 1);
if (CONSTANT_P (size))
anti_adjust_stack (plus_constant (size, extra));
- else if (GET_CODE (size) == REG && extra == 0)
+ else if (REG_P (size) && extra == 0)
anti_adjust_stack (size);
else
{
/* If X is a hard register in a non-integer mode, copy it into a pseudo;
SUBREGs of such registers are not allowed. */
- if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
+ if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
&& GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
x = copy_to_reg (x);
{
return ((x == 0
/* Only registers can be subtargets. */
- || GET_CODE (x) != REG
+ || !REG_P (x)
/* If the register is readonly, it can't be set more than once. */
|| RTX_UNCHANGING_P (x)
/* Don't use hard regs to avoid extending their life. */
if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
&& TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
&& ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
- && GET_CODE (DECL_RTL (to)) == REG))
+ && REG_P (DECL_RTL (to))))
{
rtx value;
/* Don't move directly into a return register. */
if (TREE_CODE (to) == RESULT_DECL
- && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
+ && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
{
rtx temp;
Otherwise, if TEMP is not TARGET, return TEMP
if it is constant (for efficiency),
or if we really want the correct value. */
- if (!(target && GET_CODE (target) == REG
+ if (!(target && REG_P (target)
&& REGNO (target) < FIRST_PSEUDO_REGISTER)
&& !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
&& ! rtx_equal_p (temp, target)
/* Return TARGET itself if it is a hard register. */
else if ((want_value & 1) != 0
&& GET_MODE (target) != BLKmode
- && ! (GET_CODE (target) == REG
+ && ! (REG_P (target)
&& REGNO (target) < FIRST_PSEUDO_REGISTER))
return copy_to_reg (target);
return target;
}
\f
-/* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
+/* Examine CTOR.  Discover how many scalar fields are set to nonzero
+   values and place it in *P_NZ_ELTS.  Discover how many scalar fields
+   are set to non-constant values and place it in *P_NC_ELTS.  */
-static int
-is_zeros_p (tree exp)
+static void
+categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
+			    HOST_WIDE_INT *p_nc_elts)
{
-  tree elt;
+  HOST_WIDE_INT nz_elts, nc_elts;
+  tree list;
-  switch (TREE_CODE (exp))
+  /* Accumulate into locals; the callers' totals are updated once at
+     the end so recursion composes cleanly.  */
+  nz_elts = 0;
+  nc_elts = 0;
+
+  for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
    {
-    case CONVERT_EXPR:
-    case NOP_EXPR:
-    case NON_LVALUE_EXPR:
-    case VIEW_CONVERT_EXPR:
-      return is_zeros_p (TREE_OPERAND (exp, 0));
+      tree value = TREE_VALUE (list);
+      tree purpose = TREE_PURPOSE (list);
+      HOST_WIDE_INT mult;
-    case INTEGER_CST:
-      return integer_zerop (exp);
+      /* A RANGE_EXPR index initializes every element in [lo, hi].
+	 Weight this element's counts by the range length when both
+	 bounds are known host integers; otherwise fall back to 1.  */
+      mult = 1;
+      if (TREE_CODE (purpose) == RANGE_EXPR)
+	{
+	  tree lo_index = TREE_OPERAND (purpose, 0);
+	  tree hi_index = TREE_OPERAND (purpose, 1);
-    case COMPLEX_CST:
-      return
-	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
+	  if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
+	    mult = (tree_low_cst (hi_index, 1)
+		    - tree_low_cst (lo_index, 1) + 1);
+	}
-    case REAL_CST:
-      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
+      switch (TREE_CODE (value))
+	{
+	case CONSTRUCTOR:
+	  {
+	    /* Recurse into nested constructors and scale their
+	       counts by this element's multiplicity.  */
+	    HOST_WIDE_INT nz = 0, nc = 0;
+	    categorize_ctor_elements_1 (value, &nz, &nc);
+	    nz_elts += mult * nz;
+	    nc_elts += mult * nc;
+	  }
+	  break;
-    case VECTOR_CST:
-      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
-	   elt = TREE_CHAIN (elt))
-	if (!is_zeros_p (TREE_VALUE (elt)))
-	  return 0;
+	case INTEGER_CST:
+	case REAL_CST:
+	  if (!initializer_zerop (value))
+	    nz_elts += mult;
+	  break;
+	case COMPLEX_CST:
+	  /* The real and imaginary parts count as separate scalars.  */
+	  if (!initializer_zerop (TREE_REALPART (value)))
+	    nz_elts += mult;
+	  if (!initializer_zerop (TREE_IMAGPART (value)))
+	    nz_elts += mult;
+	  break;
+	case VECTOR_CST:
+	  {
+	    tree v;
+	    for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
+	      if (!initializer_zerop (TREE_VALUE (v)))
+		nz_elts += mult;
+	  }
+	  break;
-      return 1;
+	default:
+	  /* Anything else is treated as a nonzero scalar; it is
+	     additionally non-constant if it is not a valid static
+	     initializer.  */
+	  nz_elts += mult;
+	  if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
+	    nc_elts += mult;
+	  break;
+	}
+    }
-    case CONSTRUCTOR:
-      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
-	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
-      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
-	if (! is_zeros_p (TREE_VALUE (elt)))
-	  return 0;
+  *p_nz_elts += nz_elts;
+  *p_nc_elts += nc_elts;
+}
+
+/* Public entry point: categorize the elements of constructor CTOR.
+   On return, *P_NZ_ELTS holds the number of scalar fields set to
+   nonzero values and *P_NC_ELTS the number set to non-constant
+   values.  */
+
+void
+categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
+			  HOST_WIDE_INT *p_nc_elts)
+{
+  /* Zero the accumulators; the recursive worker only adds to them.  */
+  *p_nz_elts = 0;
+  *p_nc_elts = 0;
+  categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
+}
+
+/* Count the number of scalars in TYPE.  Return -1 if TYPE is
+   variable-sized or the scalar count would overflow a
+   HOST_WIDE_INT.  */
+
+HOST_WIDE_INT
+count_type_elements (tree type)
+{
+  const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
+  switch (TREE_CODE (type))
+    {
+    case ARRAY_TYPE:
+      {
+	tree telts = array_type_nelts (type);
+	if (telts && host_integerp (telts, 1))
+	  {
+	    /* array_type_nelts returns the number of elements minus
+	       one, so correct for that here.  */
+	    HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
+	    HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
+	    if (n == 0)
+	      return 0;
+	    /* Propagate a failure from the element type, and only
+	       multiply when the product cannot overflow.  The previous
+	       test was inverted: it returned N * M exactly when the
+	       product would have overflowed, and -1 otherwise.  */
+	    if (m >= 0 && max / n > m)
+	      return n * m;
+	  }
+	return -1;
+      }
+
+    case RECORD_TYPE:
+      {
+	HOST_WIDE_INT n = 0, t;
+	tree f;
+
+	/* Sum the scalar counts of all fields; fail if any field is
+	   itself variable-sized.  */
+	for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
+	  if (TREE_CODE (f) == FIELD_DECL)
+	    {
+	      t = count_type_elements (TREE_TYPE (f));
+	      if (t < 0)
+		return -1;
+	      n += t;
+	    }
+
+	return n;
+      }
+    case UNION_TYPE:
+    case QUAL_UNION_TYPE:
+      {
+	/* Ho hum.  How in the world do we guess here?  Clearly it isn't
+	   right to count the fields.  Guess based on the number of words.  */
+	HOST_WIDE_INT n = int_size_in_bytes (type);
+	if (n < 0)
+	  return -1;
+	return n / UNITS_PER_WORD;
+      }
+
+    case COMPLEX_TYPE:
+      return 2;
+
+    case VECTOR_TYPE:
+      /* ??? This is broke.  We should encode the vector width in the tree.  */
+      return GET_MODE_NUNITS (TYPE_MODE (type));
+
+    case INTEGER_TYPE:
+    case REAL_TYPE:
+    case ENUMERAL_TYPE:
+    case BOOLEAN_TYPE:
+    case CHAR_TYPE:
+    case POINTER_TYPE:
+    case OFFSET_TYPE:
+    case REFERENCE_TYPE:
      return 1;
+    case VOID_TYPE:
+    case METHOD_TYPE:
+    case FILE_TYPE:
+    case SET_TYPE:
+    case FUNCTION_TYPE:
+    case LANG_TYPE:
    default:
-      return 0;
+      abort ();
    }
}
mostly_zeros_p (tree exp)
{
if (TREE_CODE (exp) == CONSTRUCTOR)
+
{
- int elts = 0, zeros = 0;
- tree elt = CONSTRUCTOR_ELTS (exp);
+ HOST_WIDE_INT nz_elts, nc_elts, elts;
+
+ /* If there are no ranges of true bits, it is all zero. */
if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
- {
- /* If there are no ranges of true bits, it is all zero. */
- return elt == NULL_TREE;
- }
- for (; elt; elt = TREE_CHAIN (elt))
- {
- /* We do not handle the case where the index is a RANGE_EXPR,
- so the statistic will be somewhat inaccurate.
- We do make a more accurate count in store_constructor itself,
- so since this function is only used for nested array elements,
- this should be close enough. */
- if (mostly_zeros_p (TREE_VALUE (elt)))
- zeros++;
- elts++;
- }
+ return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
- return 4 * zeros >= 3 * elts;
+ categorize_ctor_elements (exp, &nz_elts, &nc_elts);
+ elts = count_type_elements (TREE_TYPE (exp));
+
+ return nz_elts < elts / 4;
}
- return is_zeros_p (exp);
+ return initializer_zerop (exp);
}
\f
/* Helper function for store_constructor.
set the initial value as zero so we can fold the value into
a constant. But if more than one register is involved,
this probably loses. */
- else if (GET_CODE (target) == REG && TREE_STATIC (exp)
+ else if (REG_P (target) && TREE_STATIC (exp)
&& GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
{
emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
clear the whole structure first. Don't do this if TARGET is a
register whose mode size isn't equal to SIZE since clear_storage
can't handle this case. */
- else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
- || mostly_zeros_p (exp))
- && (GET_CODE (target) != REG
+ else if (size > 0
+ && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
+ || mostly_zeros_p (exp))
+ && (!REG_P (target)
|| ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
== size)))
{
if (field == 0)
continue;
- if (cleared && is_zeros_p (value))
+ if (cleared && initializer_zerop (value))
continue;
if (host_integerp (DECL_SIZE (field), 1))
start of a word, try to widen it to a full word.
This special case allows us to output C++ member function
initializations in a form that the optimizers can understand. */
- if (GET_CODE (target) == REG
+ if (REG_P (target)
&& bitsize < BITS_PER_WORD
&& bitpos % BITS_PER_WORD == 0
&& GET_MODE_CLASS (mode) == MODE_INT
/* If the constructor has fewer elements than the array,
clear the whole array first. Similarly if this is
static constructor of a non-BLKmode object. */
- if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
+ if (cleared || (REG_P (target) && TREE_STATIC (exp)))
need_to_clear = 1;
else
{
tree index = TREE_PURPOSE (elt);
rtx xtarget = target;
- if (cleared && is_zeros_p (value))
+ if (cleared && initializer_zerop (value))
continue;
unsignedp = TYPE_UNSIGNED (elttype);
{
tree lo_index = TREE_OPERAND (index, 0);
tree hi_index = TREE_OPERAND (index, 1);
- rtx index_r, pos_rtx, loop_end;
- struct nesting *loop;
+ rtx index_r, pos_rtx;
HOST_WIDE_INT lo, hi, count;
tree position;
}
else
{
- expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
- loop_end = gen_label_rtx ();
+ rtx loop_start = gen_label_rtx ();
+ rtx loop_end = gen_label_rtx ();
+ tree exit_cond;
+ expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
unsignedp = TYPE_UNSIGNED (domain);
index = build_decl (VAR_DECL, NULL_TREE, domain);
emit_queue ();
}
store_expr (lo_index, index_r, 0);
- loop = expand_start_loop (0);
+
+ /* Build the head of the loop. */
+ do_pending_stack_adjust ();
+ emit_queue ();
+ emit_label (loop_start);
/* Assign value to element index. */
position
else
store_expr (value, xtarget, 0);
- expand_exit_loop_if_false (loop,
- build (LT_EXPR, integer_type_node,
- index, hi_index));
+ /* Generate a conditional jump to exit the loop. */
+ exit_cond = build (LT_EXPR, integer_type_node,
+ index, hi_index);
+ jumpif (exit_cond, loop_end);
+ /* Update the loop counter, and jump to the head of
+ the loop. */
expand_increment (build (PREINCREMENT_EXPR,
TREE_TYPE (index),
index, integer_one_node), 0, 0);
- expand_end_loop ();
+ emit_jump (loop_start);
+
+ /* Build the end of the loop. */
emit_label (loop_end);
}
}
twice, once with emit_move_insn and once via store_field. */
if (mode == BLKmode
- && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
+ && (REG_P (target) || GET_CODE (target) == SUBREG))
{
rtx object = assign_temp (type, 0, 1, 1);
rtx blk_object = adjust_address (object, BLKmode, 0);
if (bitpos != 0)
abort ();
- return store_expr (exp, target, 0);
+ return store_expr (exp, target, value_mode != VOIDmode);
}
/* If the structure is in a register or if the component
|| (mode != BLKmode && ! direct_store[(int) mode]
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
- || GET_CODE (target) == REG
+ || REG_P (target)
|| GET_CODE (target) == SUBREG
/* If the field isn't aligned enough to store as an ordinary memref,
store it as a bit field. */
/* If a value is wanted, it must be the lhs;
so make the address stable for multiple use. */
- if (value_mode != VOIDmode && GET_CODE (addr) != REG
+ if (value_mode != VOIDmode && !REG_P (addr)
&& ! CONSTANT_ADDRESS_P (addr)
/* A frame-pointer reference is already stable. */
&& ! (GET_CODE (addr) == PLUS
/* Check for subreg applied to an expression produced by loop optimizer. */
if (code == SUBREG
- && GET_CODE (SUBREG_REG (value)) != REG
+ && !REG_P (SUBREG_REG (value))
&& GET_CODE (SUBREG_REG (value)) != MEM)
{
value = simplify_gen_subreg (GET_MODE (value),
if (ARITHMETIC_P (value))
{
op2 = XEXP (value, 1);
- if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
+ if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
subtarget = 0;
if (code == MINUS && GET_CODE (op2) == CONST_INT)
{
creating another one around this addition. */
if (code == PLUS && GET_CODE (op2) == CONST_INT
&& GET_CODE (XEXP (value, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
+ && REG_P (XEXP (XEXP (value, 0), 0))
&& REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
&& REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
{
if (GET_CODE (x) == SUBREG)
{
x = SUBREG_REG (x);
- if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
+ if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
return 0;
}
case CALL_EXPR:
/* Assume that the call will clobber all hard registers and
all of memory. */
- if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
+ if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
|| GET_CODE (x) == MEM)
return 0;
break;
if (GET_CODE (exp_rtl) == SUBREG)
{
exp_rtl = SUBREG_REG (exp_rtl);
- if (GET_CODE (exp_rtl) == REG
+ if (REG_P (exp_rtl)
&& REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
return 0;
}
return MAX (factor, target_align);
}
\f
+/* Expand variable VAR: give it an rtl representation (DECL_RTL) and,
+   for static variables, arrange for it to be output.  External
+   declarations and already-expanded variables are left alone.  */
+
+void
+expand_var (tree var)
+{
+  if (DECL_EXTERNAL (var))
+    return;
+
+  if (TREE_STATIC (var))
+    /* If this is an inlined copy of a static local variable,
+       look up the original decl.  */
+    var = DECL_ORIGIN (var);
+
+  /* Skip variables that were already expanded: statics that have
+     already been written out, and others whose DECL_RTL is set.  */
+  if (TREE_STATIC (var)
+      ? !TREE_ASM_WRITTEN (var)
+      : !DECL_RTL_SET_P (var))
+    {
+      if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
+	{
+	  /* Prepare a mem & address for the decl.  */
+	  rtx x;
+
+	  /* Deferred output only applies to non-static variables.  */
+	  if (TREE_STATIC (var))
+	    abort ();
+
+	  x = gen_rtx_MEM (DECL_MODE (var),
+			   gen_reg_rtx (Pmode));
+
+	  set_mem_attributes (x, var, 1);
+	  SET_DECL_RTL (var, x);
+	}
+      else if (lang_hooks.expand_decl (var))
+	/* OK.  */;
+      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
+	expand_decl (var);
+      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
+	rest_of_decl_compilation (var, NULL, 0, 0);
+      else if (TREE_CODE (var) == TYPE_DECL
+	       || TREE_CODE (var) == CONST_DECL
+	       || TREE_CODE (var) == FUNCTION_DECL
+	       || TREE_CODE (var) == LABEL_DECL)
+	/* No expansion needed.  */;
+      else
+	abort ();
+    }
+}
+
+/* Expand declarations of the variables in list VARS and emit code for
+   their initializers, skipping external declarations.  */
+
+static void
+expand_vars (tree vars)
+{
+  for (; vars; vars = TREE_CHAIN (vars))
+    {
+      tree var = vars;
+
+      if (DECL_EXTERNAL (var))
+	continue;
+
+      expand_var (var);
+      /* Emit code for any initializer attached to the decl.  */
+      expand_decl_init (var);
+    }
+}
+
/* Subroutine of expand_expr. Expand the two operands of a binary
expression EXP0 and EXP1 placing the results in OP0 and OP1.
The value may be stored in TARGET if TARGET is nonzero. The
COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
recursively. */
+static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
+			       enum expand_modifier, rtx *);
+
rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
+  int rn = -1;
+  rtx ret, last = NULL;
+
+  /* Handle ERROR_MARK before anybody tries to access its type.  */
+  if (TREE_CODE (exp) == ERROR_MARK
+      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
+    {
+      /* CONST0_RTX may be null for some modes; fall back to const0_rtx.  */
+      ret = CONST0_RTX (tmode);
+      return ret ? ret : const0_rtx;
+    }
+
+  if (flag_non_call_exceptions)
+    {
+      rn = lookup_stmt_eh_region (exp);
+      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
+      if (rn >= 0)
+	last = get_last_insn ();
+    }
+
+  /* If this is an expression of some kind and it has an associated line
+     number, then emit the line number before expanding the expression.
+
+     We need to save and restore the file and line information so that
+     errors discovered during expansion are emitted with the right
+     information.  It would be better if the diagnostic routines
+     used the file/line information embedded in the tree nodes rather
+     than globals.  */
+  if (cfun && EXPR_HAS_LOCATION (exp))
+    {
+      location_t saved_location = input_location;
+      input_location = EXPR_LOCATION (exp);
+      emit_line_note (input_location);
+
+      /* Record where the insns produced belong.  */
+      if (cfun->dont_emit_block_notes)
+	record_block_change (TREE_BLOCK (exp));
+
+      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
+
+      input_location = saved_location;
+    }
+  else
+    {
+      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
+    }
+
+  /* If using non-call exceptions, mark all insns that may trap.
+     expand_call() will mark CALL_INSNs before we get to this code,
+     but it doesn't handle libcalls, and these may trap.  */
+  if (rn >= 0)
+    {
+      rtx insn;
+      for (insn = next_real_insn (last); insn;
+	   insn = next_real_insn (insn))
+	{
+	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
+	      /* If we want exceptions for non-call insns, any
+		 may_trap_p instruction may throw.  */
+	      && GET_CODE (PATTERN (insn)) != CLOBBER
+	      && GET_CODE (PATTERN (insn)) != USE
+	      && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
+	    {
+	      REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
+						  REG_NOTES (insn));
+	    }
+	}
+    }
+
+  return ret;
+}
+
+static rtx
+expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
+ enum expand_modifier modifier, rtx *alt_rtl)
+{
rtx op0, op1, temp;
tree type = TREE_TYPE (exp);
int unsignedp;
int ignore;
tree context;
- /* Handle ERROR_MARK before anybody tries to access its type. */
- if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
- {
- op0 = CONST0_RTX (tmode);
- if (op0 != 0)
- return op0;
- return const0_rtx;
- }
-
mode = TYPE_MODE (type);
unsignedp = TYPE_UNSIGNED (type);
Another is a CALL_EXPR which must return in memory. */
if (! cse_not_expected && mode != BLKmode && target
- && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
+ && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
&& ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
&& ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
target = 0;
case LABEL_DECL:
{
tree function = decl_function_context (exp);
- /* Labels in containing functions, or labels used from initializers,
- must be forced. */
- if (modifier == EXPAND_INITIALIZER
- || (function != current_function_decl
- && function != inline_function_decl
- && function != 0))
- temp = force_label_rtx (exp);
- else
- temp = label_rtx (exp);
- temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
+ temp = label_rtx (exp);
+ temp = gen_rtx_LABEL_REF (Pmode, temp);
+
if (function != current_function_decl
- && function != inline_function_decl && function != 0)
- LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
+ && function != 0)
+ LABEL_REF_NONLOCAL_P (temp) = 1;
+
+ temp = gen_rtx_MEM (FUNCTION_MODE, temp);
return temp;
}
/* Handle variables inherited from containing functions. */
context = decl_function_context (exp);
- /* We treat inline_function_decl as an alias for the current function
- because that is the inline function whose vars, types, etc.
- are being merged into the current function.
- See expand_inline_function. */
-
if (context != 0 && context != current_function_decl
- && context != inline_function_decl
/* If var is static, we don't need a static chain to access it. */
&& ! (GET_CODE (DECL_RTL (exp)) == MEM
&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
See expand_decl. */
else if (GET_CODE (DECL_RTL (exp)) == MEM
- && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
+ && REG_P (XEXP (DECL_RTL (exp), 0)))
temp = validize_mem (DECL_RTL (exp));
/* If DECL_RTL is memory, we are in the normal case and either
&& (! memory_address_p (DECL_MODE (exp),
XEXP (DECL_RTL (exp), 0))
|| (flag_force_addr
- && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
+ && !REG_P (XEXP (DECL_RTL (exp), 0)))))
{
if (alt_rtl)
*alt_rtl = DECL_RTL (exp);
if the address is a register. */
if (temp != 0)
{
- if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
+ if (GET_CODE (temp) == MEM && REG_P (XEXP (temp, 0)))
mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
return temp;
must be a promoted value. We return a SUBREG of the wanted mode,
but mark it so that we know that it was already extended. */
- if (GET_CODE (DECL_RTL (exp)) == REG
+ if (REG_P (DECL_RTL (exp))
&& GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
{
/* Get the signedness used for this variable. Ensure we get the
copy_rtx (XEXP (temp, 0)));
return temp;
- case EXPR_WITH_FILE_LOCATION:
- {
- rtx to_return;
- struct file_stack fs;
-
- fs.location = input_location;
- fs.next = expr_wfl_stack;
- input_filename = EXPR_WFL_FILENAME (exp);
- input_line = EXPR_WFL_LINENO (exp);
- expr_wfl_stack = &fs;
- if (EXPR_WFL_EMIT_LINE_NOTE (exp))
- emit_line_note (input_location);
- /* Possibly avoid switching back and forth here. */
- to_return = expand_expr (EXPR_WFL_NODE (exp),
- (ignore ? const0_rtx : target),
- tmode, modifier);
- if (expr_wfl_stack != &fs)
- abort ();
- input_location = fs.location;
- expr_wfl_stack = fs.next;
- return to_return;
- }
-
case SAVE_EXPR:
context = decl_function_context (exp);
if (context == 0)
SAVE_EXPR_CONTEXT (exp) = current_function_decl;
- /* We treat inline_function_decl as an alias for the current function
- because that is the inline function whose vars, types, etc.
- are being merged into the current function.
- See expand_inline_function. */
- if (context == current_function_decl || context == inline_function_decl)
+ if (context == current_function_decl)
context = 0;
/* If this is non-local, handle it. */
find_function_data (context);
temp = SAVE_EXPR_RTL (exp);
- if (temp && GET_CODE (temp) == REG)
+ if (temp && REG_P (temp))
{
put_var_into_stack (exp, /*rescan=*/true);
temp = SAVE_EXPR_RTL (exp);
3, 0, 0);
SAVE_EXPR_RTL (exp) = temp;
- if (!optimize && GET_CODE (temp) == REG)
+ if (!optimize && REG_P (temp))
save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
save_expr_regs);
wanted mode but mark it so that we know that it was already
extended. */
- if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
+ if (REG_P (temp) && GET_MODE (temp) != mode)
{
temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
promote_mode (type, mode, &unsignedp, 0);
must be a promoted value. We return a SUBREG of the wanted mode,
but mark it so that we know that it was already extended. */
- if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
+ if (REG_P (SAVE_EXPR_RTL (exp))
&& GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
{
/* Compute the signedness and make the proper SUBREG. */
expand_computed_goto (TREE_OPERAND (exp, 0));
return const0_rtx;
+ /* These are lowered during gimplification, so we should never ever
+ see them here. */
+ case LOOP_EXPR:
case EXIT_EXPR:
- expand_exit_loop_if_false (NULL,
- invert_truthvalue (TREE_OPERAND (exp, 0)));
- return const0_rtx;
+ abort ();
case LABELED_BLOCK_EXPR:
if (LABELED_BLOCK_BODY (exp))
expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
return const0_rtx;
- case LOOP_EXPR:
- push_temp_slots ();
- expand_start_loop (1);
- expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
- expand_end_loop ();
- pop_temp_slots ();
-
- return const0_rtx;
-
case BIND_EXPR:
{
- tree vars = TREE_OPERAND (exp, 0);
+ tree block = BIND_EXPR_BLOCK (exp);
+ int mark_ends;
- /* Need to open a binding contour here because
- if there are any cleanups they must be contained here. */
- expand_start_bindings (2);
+ if (TREE_CODE (BIND_EXPR_BODY (exp)) != RTL_EXPR)
+ {
+ /* If we're in functions-as-trees mode, this BIND_EXPR represents
+ the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
+ mark_ends = (block != NULL_TREE);
+ expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
+ }
+ else
+ {
+ /* If we're not in functions-as-trees mode, we've already emitted
+ those notes into our RTL_EXPR, so we just want to splice our BLOCK
+ into the enclosing one. */
+ mark_ends = 0;
- /* Mark the corresponding BLOCK for output in its proper place. */
- if (TREE_OPERAND (exp, 2) != 0
- && ! TREE_USED (TREE_OPERAND (exp, 2)))
- lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
+ /* Need to open a binding contour here because
+ if there are any cleanups they must be contained here. */
+ expand_start_bindings_and_block (2, NULL_TREE);
- /* If VARS have not yet been expanded, expand them now. */
- while (vars)
- {
- if (!DECL_RTL_SET_P (vars))
- expand_decl (vars);
- expand_decl_init (vars);
- vars = TREE_CHAIN (vars);
+ /* Mark the corresponding BLOCK for output in its proper place. */
+ if (block)
+ {
+ if (TREE_USED (block))
+ abort ();
+ lang_hooks.decls.insert_block (block);
+ }
}
- temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
+ /* If VARS have not yet been expanded, expand them now. */
+ expand_vars (BIND_EXPR_VARS (exp));
+
+ /* TARGET was clobbered early in this function. The correct
+ indicator or whether or not we need the value of this
+ expression is the IGNORE variable. */
+ temp = expand_expr (BIND_EXPR_BODY (exp),
+ ignore ? const0_rtx : target,
+ tmode, modifier);
- expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
+ expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
return temp;
}
&& (! MOVE_BY_PIECES_P
(tree_low_cst (TYPE_SIZE_UNIT (type), 1),
TYPE_ALIGN (type)))
- && ((TREE_CODE (type) == VECTOR_TYPE
- && !is_zeros_p (exp))
- || ! mostly_zeros_p (exp)))))
+ && ! mostly_zeros_p (exp))))
|| ((modifier == EXPAND_INITIALIZER
|| modifier == EXPAND_CONST_ADDRESS)
&& TREE_CONSTANT (exp)))
case INDIRECT_REF:
{
tree exp1 = TREE_OPERAND (exp, 0);
- tree index;
- tree string = string_constant (exp1, &index);
- /* Try to optimize reads from const strings. */
- if (string
- && TREE_CODE (string) == STRING_CST
- && TREE_CODE (index) == INTEGER_CST
- && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
- && GET_MODE_CLASS (mode) == MODE_INT
- && GET_MODE_SIZE (mode) == 1
- && modifier != EXPAND_WRITE)
- return gen_int_mode (TREE_STRING_POINTER (string)
- [TREE_INT_CST_LOW (index)], mode);
+ if (modifier != EXPAND_WRITE)
+ {
+ tree t;
+
+ t = fold_read_from_constant_string (exp);
+ if (t)
+ return expand_expr (t, target, tmode, modifier);
+ }
op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
op0 = memory_address (mode, op0);
}
case ARRAY_REF:
+
+#ifdef ENABLE_CHECKING
if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
abort ();
+#endif
{
tree array = TREE_OPERAND (exp, 0);
if (modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_INITIALIZER
- && modifier != EXPAND_MEMORY
- && TREE_CODE (array) == STRING_CST
- && TREE_CODE (index) == INTEGER_CST
- && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
- && GET_MODE_CLASS (mode) == MODE_INT
- && GET_MODE_SIZE (mode) == 1)
- return gen_int_mode (TREE_STRING_POINTER (array)
- [TREE_INT_CST_LOW (index)], mode);
+ && modifier != EXPAND_MEMORY)
+ {
+ tree t = fold_read_from_constant_string (exp);
+
+ if (t)
+ return expand_expr (t, target, tmode, modifier);
+ }
/* If this is a constant index into a constant array,
just get the value from the array. Handle both the cases when
(which we know to be the width of a basic mode), then
storing into memory, and changing the mode to BLKmode. */
if (mode1 == VOIDmode
- || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
+ || REG_P (op0) || GET_CODE (op0) == SUBREG
|| (mode1 != BLKmode && ! direct_load[(int) mode1]
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
op0 = validize_mem (op0);
- if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
+ if (GET_CODE (op0) == MEM && REG_P (XEXP (op0, 0)))
mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
op0 = copy_rtx (op0);
set_mem_attributes (op0, exp, 0);
- if (GET_CODE (XEXP (op0, 0)) == REG)
+ if (REG_P (XEXP (op0, 0)))
mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
MEM_VOLATILE_P (op0) |= volatilep;
{
if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
== BUILT_IN_FRONTEND)
- /* ??? Use (*fun) form because expand_expr is a macro. */
- return (*lang_hooks.expand_expr) (exp, original_target,
- tmode, modifier,
- alt_rtl);
+ return lang_hooks.expand_expr (exp, original_target,
+ tmode, modifier,
+ alt_rtl);
else
return expand_builtin (exp, target, subtarget, tmode, ignore);
}
adjust_address (target, TYPE_MODE (valtype), 0),
modifier == EXPAND_STACK_PARM ? 2 : 0);
- else if (GET_CODE (target) == REG)
+ else if (REG_P (target))
/* Store this field into a union of the proper type. */
store_field (target,
MIN ((int_size_in_bytes (TREE_TYPE
op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
EXPAND_SUM);
- if (GET_CODE (op0) != REG)
+ if (!REG_P (op0))
op0 = force_operand (op0, NULL_RTX);
- if (GET_CODE (op0) != REG)
+ if (!REG_P (op0))
op0 = copy_to_mode_reg (mode, op0);
return gen_rtx_MULT (mode, op0,
|| modifier == EXPAND_STACK_PARM
|| (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
|| GET_MODE (target) != mode
- || (GET_CODE (target) == REG
+ || (REG_P (target)
&& REGNO (target) < FIRST_PSEUDO_REGISTER))
target = gen_reg_rtx (mode);
expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
case UNGT_EXPR:
case UNGE_EXPR:
case UNEQ_EXPR:
+ case LTGT_EXPR:
temp = do_store_flag (exp,
modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
tmode != VOIDmode ? tmode : mode, 0);
/* For foo != 0, load foo, and if it is nonzero load 1 instead. */
if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
&& original_target
- && GET_CODE (original_target) == REG
+ && REG_P (original_target)
&& (GET_MODE (original_target)
== TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
{
|| ! safe_from_p (target, exp, 1)
/* Make sure we don't have a hard reg (such as function's return
value) live across basic blocks, if not optimizing. */
- || (!optimize && GET_CODE (target) == REG
+ || (!optimize && REG_P (target)
&& REGNO (target) < FIRST_PSEUDO_REGISTER)))
target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
(ignore ? const0_rtx : target),
VOIDmode, modifier, alt_rtl);
+ /* A STATEMENT_LIST is only meaningful in a void context (IGNORE must
+ be set); expand each statement solely for its side effects and
+ yield no value. */
+ case STATEMENT_LIST:
+ {
+ tree_stmt_iterator iter;
+
+ if (!ignore)
+ abort ();
+
+ for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
+ expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
+ }
+ return const0_rtx;
+
case COND_EXPR:
+ /* If it's void, we don't need to worry about computing a value. */
+ if (VOID_TYPE_P (TREE_TYPE (exp)))
+ {
+ tree pred = TREE_OPERAND (exp, 0);
+ tree then_ = TREE_OPERAND (exp, 1);
+ tree else_ = TREE_OPERAND (exp, 2);
+
+ /* If we do not have any pending cleanups or stack_levels
+ to restore, and at least one arm of the COND_EXPR is a
+ GOTO_EXPR to a local label, then we can emit more efficient
+ code by using jumpif/jumpifnot instead of the 'if' machinery. */
+ if (! optimize
+ || containing_blocks_have_cleanups_or_stack_level ())
+ ;
+ else if (TREE_CODE (then_) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
+ {
+ /* "if (p) goto L; else S;" => conditional jump, then expand S. */
+ jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
+ return expand_expr (else_, const0_rtx, VOIDmode, 0);
+ }
+ else if (TREE_CODE (else_) == GOTO_EXPR
+ && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
+ {
+ /* Mirror case: jump past the 'then' arm when PRED is false. */
+ jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
+ return expand_expr (then_, const0_rtx, VOIDmode, 0);
+ }
+
+ /* Just use the 'if' machinery. */
+ expand_start_cond (pred, 0);
+ start_cleanup_deferral ();
+ expand_expr (then_, const0_rtx, VOIDmode, 0);
+
+ exp = else_;
+
+ /* Iterate over 'else if's instead of recursing. */
+ for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
+ {
+ expand_start_else ();
+ if (EXPR_HAS_LOCATION (exp))
+ {
+ emit_line_note (EXPR_LOCATION (exp));
+ if (cfun->dont_emit_block_notes)
+ record_block_change (TREE_BLOCK (exp));
+ }
+ expand_elseif (TREE_OPERAND (exp, 0));
+ expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
+ }
+ /* Don't emit the jump and label if there's no 'else' clause.
+ NOTE(review): an empty else arm is presumably side-effect-free
+ here, so TREE_SIDE_EFFECTS doubles as an emptiness test --
+ confirm against how the front ends build void COND_EXPRs. */
+ if (TREE_SIDE_EFFECTS (exp))
+ {
+ expand_start_else ();
+ expand_expr (exp, const0_rtx, VOIDmode, 0);
+ }
+ end_cleanup_deferral ();
+ expand_end_cond ();
+ return const0_rtx;
+ }
+
/* If we would have a "singleton" (see below) were it not for a
conversion in each arm, bring that conversion back out. */
if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
temp = assign_temp (type, 0, 0, 1);
else if (original_target
&& (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
- || (singleton && GET_CODE (original_target) == REG
+ || (singleton && REG_P (original_target)
&& REGNO (original_target) >= FIRST_PSEUDO_REGISTER
&& original_target == var_rtx (singleton)))
&& GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
&& (! can_conditionally_move_p (mode)
- || GET_CODE (original_target) == REG
+ || REG_P (original_target)
|| TREE_ADDRESSABLE (type))
#endif
&& (GET_CODE (original_target) != MEM
might clobber it. */
if ((binary_op
&& ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
- || (GET_CODE (temp) == REG
+ || (REG_P (temp)
&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
temp = gen_reg_rtx (mode);
store_expr (singleton, temp,
|| TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
&& safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
{
- if (GET_CODE (temp) == REG
+ if (REG_P (temp)
&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 1), temp,
|| TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
&& safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
{
- if (GET_CODE (temp) == REG
+ if (REG_P (temp)
&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
temp = gen_reg_rtx (mode);
store_expr (TREE_OPERAND (exp, 2), temp,
/* Mark it as expanded. */
TREE_OPERAND (exp, 1) = NULL_TREE;
- store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
+ if (VOID_TYPE_P (TREE_TYPE (exp1)))
+ /* If the initializer is void, just expand it; it will initialize
+ the object directly. */
+ expand_expr (exp1, const0_rtx, VOIDmode, 0);
+ else
+ store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
case ADDR_EXPR:
if (modifier == EXPAND_STACK_PARM)
target = 0;
- /* Are we taking the address of a nested function? */
- if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
- && decl_function_context (TREE_OPERAND (exp, 0)) != 0
- && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
- && ! TREE_STATIC (exp))
- {
- op0 = trampoline_address (TREE_OPERAND (exp, 0));
- op0 = force_operand (op0, target);
- }
/* If we are taking the address of something erroneous, just
return a zero. */
- else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
+ if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
return const0_rtx;
/* If we are taking the address of a constant and are at the
top level, we have to use output_constant_def since we can't
if (CONSTANT_P (op0))
op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
op0);
- else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
+ else if (REG_P (op0) || GET_CODE (op0) == SUBREG
|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
|| GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
{
}
if (flag_force_addr
- && GET_CODE (op0) != REG
+ && !REG_P (op0)
&& modifier != EXPAND_CONST_ADDRESS
&& modifier != EXPAND_INITIALIZER
&& modifier != EXPAND_SUM)
op0 = force_reg (Pmode, op0);
- if (GET_CODE (op0) == REG
+ if (REG_P (op0)
&& ! REG_USERVAR_P (op0))
mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
return target;
}
+ case RESX_EXPR:
+ expand_resx_expr (exp);
+ return const0_rtx;
+
case TRY_CATCH_EXPR:
{
tree handler = TREE_OPERAND (exp, 1);
expand_eh_region_start ();
-
op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
-
- expand_eh_region_end_cleanup (handler);
+ expand_eh_handler (handler);
return op0;
}
+ case CATCH_EXPR:
+ expand_start_catch (CATCH_TYPES (exp));
+ expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
+ expand_end_catch ();
+ return const0_rtx;
+
+ case EH_FILTER_EXPR:
+ /* Should have been handled in expand_eh_handler. */
+ abort ();
+
case TRY_FINALLY_EXPR:
{
tree try_block = TREE_OPERAND (exp, 0);
tree finally_block = TREE_OPERAND (exp, 1);
- if (!optimize || unsafe_for_reeval (finally_block) > 1)
+ if ((!optimize && lang_protect_cleanup_actions == NULL)
+ || unsafe_for_reeval (finally_block) > 1)
{
/* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
is not sufficient, so we cannot expand the block twice.
case EXC_PTR_EXPR:
return get_exception_pointer (cfun);
+ case FILTER_EXPR:
+ return get_exception_filter (cfun);
+
case FDESC_EXPR:
/* Function descriptors are not valid except for as
initialization constants, and should not be expanded. */
abort ();
+ /* Expand a switch statement: open the case structure, expand the
+ body, register every case label (range-checking each one against
+ the controlling expression's type), then emit the dispatch code. */
+ case SWITCH_EXPR:
+ expand_start_case (0, SWITCH_COND (exp), integer_type_node,
+ "switch");
+ if (SWITCH_BODY (exp))
+ expand_expr_stmt (SWITCH_BODY (exp));
+ if (SWITCH_LABELS (exp))
+ {
+ tree duplicate = 0;
+ tree vec = SWITCH_LABELS (exp);
+ size_t i, n = TREE_VEC_LENGTH (vec);
+ /* The bounds of the controlling expression's type are the same
+ for every label; compute them once, outside the loop. */
+ tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
+ tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
+ tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
+
+ for (i = 0; i < n; ++i)
+ {
+ tree elt = TREE_VEC_ELT (vec, i);
+
+ /* A label with no CASE_HIGH is a single value, not a range;
+ treat it as the degenerate range [low, low]. */
+ tree case_low = CASE_LOW (elt);
+ tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
+ if (case_low && case_high)
+ {
+ /* The diagnostics deliberately do not print the label
+ value: TREE_INT_CST is a two-word structure, and
+ passing it through varargs for %d is undefined
+ behavior. */
+
+ /* Case label is less than minimum for type. */
+ if ((tree_int_cst_compare (case_low, min_value) < 0)
+ && (tree_int_cst_compare (case_high, min_value) < 0))
+ {
+ warning ("case label value is less than minimum value for type");
+ continue;
+ }
+
+ /* Case value is greater than maximum for type. */
+ if ((tree_int_cst_compare (case_low, max_value) > 0)
+ && (tree_int_cst_compare (case_high, max_value) > 0))
+ {
+ warning ("case label value exceeds maximum value for type");
+ continue;
+ }
+
+ /* Saturate lower case label value to minimum. */
+ if ((tree_int_cst_compare (case_high, min_value) >= 0)
+ && (tree_int_cst_compare (case_low, min_value) < 0))
+ {
+ warning ("lower value in case label range less than minimum value for type");
+ case_low = min_value;
+ }
+
+ /* Saturate upper case label value to maximum. */
+ if ((tree_int_cst_compare (case_low, max_value) <= 0)
+ && (tree_int_cst_compare (case_high, max_value) > 0))
+ {
+ warning ("upper value in case label range exceeds maximum value for type");
+ case_high = max_value;
+ }
+ }
+
+ /* Duplicate case values are a front-end bug, not user error. */
+ add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
+ if (duplicate)
+ abort ();
+ }
+ }
+ expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
+ return const0_rtx;
+
+ case LABEL_EXPR:
+ expand_label (TREE_OPERAND (exp, 0));
+ return const0_rtx;
+
+ /* Register a case label with the innermost enclosing switch.
+ A duplicate case value indicates a front-end bug, hence abort. */
+ case CASE_LABEL_EXPR:
+ {
+ tree duplicate = 0;
+ add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
+ &duplicate, false);
+ if (duplicate)
+ abort ();
+ return const0_rtx;
+ }
+
+ case ASM_EXPR:
+ expand_asm_expr (exp);
+ return const0_rtx;
+
default:
- /* ??? Use (*fun) form because expand_expr is a macro. */
- return (*lang_hooks.expand_expr) (exp, original_target, tmode,
- modifier, alt_rtl);
+ return lang_hooks.expand_expr (exp, original_target, tmode,
+ modifier, alt_rtl);
}
/* Here to do an ordinary binary operator, generating an instruction
*ptr_offset = size_zero_node;
return TREE_OPERAND (arg, 0);
}
+ /* Recognize the form &"string"[index]: return the STRING_CST and
+ report the array index (converted to sizetype) as the offset.
+ NOTE(review): presumably part of string_constant; the enclosing
+ function is not visible in this hunk -- confirm *ptr_offset's
+ contract against the other returns in that function. */
+ if (TREE_CODE (arg) == ADDR_EXPR
+ && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
+ && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
+ {
+ *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
+ return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
+ }
else if (TREE_CODE (arg) == PLUS_EXPR)
{
tree arg0 = TREE_OPERAND (arg, 0);
bad_subreg = 1;
}
- op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
+ op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
&& temp != get_last_insn ());
op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
case UNEQ_EXPR:
code = UNEQ;
break;
+ case LTGT_EXPR:
+ code = LTGT;
+ break;
default:
abort ();
}
/* If this failed, we have to do this with set/compare/jump/set code. */
- if (GET_CODE (target) != REG
+ if (!REG_P (target)
|| reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
target = gen_reg_rtx (GET_MODE (target));
out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
and break_out_memory_refs will go to work on it and mess it up. */
#ifdef PIC_CASE_VECTOR_ADDRESS
- if (flag_pic && GET_CODE (index) != REG)
+ if (flag_pic && !REG_P (index))
index = copy_to_mode_reg (Pmode, index);
#endif
mode = TYPE_MODE (TREE_TYPE (exp));
- if (is_zeros_p (exp))
+ if (initializer_zerop (exp))
return CONST0_RTX (mode);
units = GET_MODE_NUNITS (mode);
return gen_rtx_raw_CONST_VECTOR (mode, v);
}
-
#include "gt-expr.h"