/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
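/* For illustration (a sketch, not part of the original source): on a
   target whose stack grows downward, a single-word push is emitted
   using the PRE_DEC addressing form selected above, roughly

     (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI 0))

   whereas PRE_INC would be used where the stack grows upward.  */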
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
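/* A worked example of the heuristic above (values are hypothetical):
   on a 32-bit target with MOVE_MAX == 4, copying 16 aligned bytes takes
   four SImode moves, so move_by_pieces_ninsns returns 4.  If the target
   defines MOVE_RATIO as 5, MOVE_BY_PIECES_P is true and the copy is
   expanded inline; with MOVE_RATIO of 3 it would instead fall back to a
   movstr pattern or a library call.  */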
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            REGNO (reg) = regno;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
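/* A minimal usage sketch (illustrative, not from the original source):
   code that reads an rtx X which may be QUEUED and stores into Y does

     x = protect_from_queue (x, 0);   (read access)
     y = protect_from_queue (y, 1);   (write access)
     emit_move_insn (y, x);
     emit_queue ();                   (flush pending increments)

   and must not hold the protected values across another emit_queue.  */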
rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Retrieve a mark on the queue.  */

rtx
mark_queue (void)
{
  return pending_chain;
}

/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

void
emit_insns_enqueued_after_mark (rtx mark)
{
  rtx p;

  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))
    return;

  while ((p = pending_chain) != mark)
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      QUEUED_BODY (p) = 0;
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  emit_insns_enqueued_after_mark (NULL_RTX);
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
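/* Example (illustrative): given a DImode pseudo TO and an SImode pseudo
   FROM, "convert_move (to, from, 1)" emits a zero-extension, e.g. via a
   zero_extendsidi2 insn if the target provides one, while UNSIGNEDP == 0
   would sign-extend instead.  */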
void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();
  /* If the source and destination are already the same, then there's
     nothing to do.  */
  if (to == from)
    return;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }
  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (REG_P (to))
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }
      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || REG_P (from)
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
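  /* Worked example for the case above (assuming a 32-bit HOST_WIDE_INT):
     converting (const_int -1) of SImode to an unsigned 64-bit mode must
     yield 0x00000000ffffffff, so we build the CONST_DOUBLE with an
     explicit zero high word via immed_double_const, where gen_lowpart
     would have sign-extended to all ones.  */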
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (REG_P (x)
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.   This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */
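/* Illustrative uses of ENDP: a plain memcpy-style copy passes 0; a
   caller open-coding mempcpy passes 1 to get DST + LEN back; and
   ENDP == 2 (DST + LEN - 1) matches what an stpcpy-style expansion
   wants.  */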
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */
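  /* For instance (illustrative), on a 32-bit target with MOVE_MAX == 4,
     a 7-byte aligned copy is emitted as one SImode move, then one HImode
     move, then one QImode move, with max_size shrinking after each pass
     of the loop below.  */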
  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
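/* Typical use (an illustrative sketch): copying a BLKmode argument ARG
   into a slot SLOT of known byte size would be

     emit_block_move (slot, arg, GEN_INT (size), BLOCK_OP_NORMAL);

   letting this routine pick between move_by_pieces, a movstr pattern,
   or a call to memcpy/bcopy.  */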
rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
#endif
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),
                                             NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,
                                           NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}
static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */
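/* The generated RTL behaves like the following C loop (a sketch):

     for (iter = 0; iter < size; iter++)
       ((char *) x)[iter] = ((char *) y)[iter];

   with the comparison placed at the bottom, entered by an initial jump.  */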
static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */
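/* For example (illustrative), a group occupying a DImode register at
   byte offset 0 and an SImode register at byte offset 8 looks like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:SI 4) (const_int 8))])

   and gen_group_rtx would return the same shape with fresh pseudos.  */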
rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && REG_P (src))
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (REG_P (src) && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else if (bytepos == 0 && XVECLEN (src, 0))
            {
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The purpose of this routine is to handle functions that return
   BLKmode structures in registers.  Some machines (the PA for example)
   want to return all small structures in registers regardless of the
   structure's alignment.  */
2133 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2142 tgtblk = assign_temp (build_qualified_type (type,
2144 (TYPE_QUALS (type) | TYPE_QUAL_CONST)), 0, 1, 1);
2146 preserve_temp_slots (tgtblk);
2149 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2150 into a new pseudo which is a full word. */
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2154 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2160 In most ABIs, the structure will be returned at the least significant end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2168 : BYTES_BIG_ENDIAN))
2170 padding_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
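/* Worked example, for illustration: with 32-bit words (UNITS_PER_WORD
   == 4), a 6-byte structure returned in the most significant end leaves
   bytes % UNITS_PER_WORD == 2, so PADDING_CORRECTION == 32 - 2 * 8 == 16
   bits are skipped before the first extraction below.  */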
2172 /* Copy the structure BITSIZE bits at a time.
2174 We could probably emit more efficient code for machines which do not use
2175 strict alignment, but it doesn't seem worth the effort at the current time. */
2177 for (bitpos = 0, xbitpos = padding_correction;
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2181 /* We need a new source operand each time xbitpos is on a
2182 word boundary and when xbitpos == padding_correction
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
2185 || xbitpos == padding_correction)
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, GET_MODE (srcreg));
2189 /* We need a new destination operand each time bitpos is on a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, word_mode, word_mode, BITS_PER_WORD), BITS_PER_WORD);
2207 /* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
2211 use_reg (rtx *call_fusage, rtx reg)
2214 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2218 = gen_rtx_EXPR_LIST (VOIDmode,
2219 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2222 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2223 starting at REGNO. All of these registers must be hard registers. */
2226 use_regs (rtx *call_fusage, int regno, int nregs)
2230 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2233 for (i = 0; i < nregs; i++)
2234 use_reg (call_fusage, regno_reg_rtx[regno + i]);
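/* A minimal usage sketch, assuming hard regs 4 and 5 carry arguments on
   some hypothetical target: build the fusage list for a call that reads
   both.  The list is then attached to the CALL_INSN as its
   CALL_INSN_FUNCTION_USAGE.  */
#if 0
  rtx fusage = NULL_RTX;
  use_regs (&fusage, 4, 2);
#endif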
2237 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2238 PARALLEL REGS. This is for calls that pass values in multiple
2239 non-contiguous locations. The Irix 6 ABI has examples of this. */
2242 use_group_regs (rtx *call_fusage, rtx regs)
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
2253 if (reg != 0 && REG_P (reg))
2254 use_reg (call_fusage, reg);
2259 /* Determine whether the LEN bytes generated by CONSTFUN can be
2260 stored to memory using several move instructions. CONSTFUNDATA is
2261 a pointer which will be passed as argument in every CONSTFUN call.
2262 ALIGN is maximum alignment we can assume. Return nonzero if a
2263 call to store_by_pieces should succeed. */
2266 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2267 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2268 void *constfundata, unsigned int align)
2270 unsigned HOST_WIDE_INT max_size, l;
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2280 if (! STORE_BY_PIECES_P (len, align))
2283 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2284 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2285 align = MOVE_MAX * BITS_PER_UNIT;
2287 /* We would first store what we can in the largest integer mode, then go to
2288 successively smaller modes. */
2291 for (reverse = 0; reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT); reverse++)
2296 max_size = STORE_MAX_PIECES + 1;
2297 while (max_size > 1)
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2304 if (mode == VOIDmode)
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= GET_MODE_ALIGNMENT (mode))
2311 unsigned int size = GET_MODE_SIZE (mode);
2318 cst = (*constfun) (constfundata, offset, mode);
2319 if (!LEGITIMATE_CONSTANT_P (cst))
2329 max_size = GET_MODE_SIZE (mode);
2332 /* The code above should have handled everything. */
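/* A minimal sketch of a CONSTFUN that a store_by_pieces caller might
   supply: it returns a MODE-sized constant with every byte equal to one
   fill byte.  The helper name and the use of CONSTFUNDATA as a byte
   pointer are assumptions made for the example.  */
#if 0
static rtx
repeated_byte_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
			enum machine_mode mode)
{
  unsigned char c = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Replicate the fill byte across all GET_MODE_SIZE (mode) bytes.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | c;
  return gen_int_mode (val, mode);
}
#endif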
2340 /* Generate several move instructions to store LEN bytes generated by
2341 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2342 pointer which will be passed as argument in every CONSTFUN call.
2343 ALIGN is maximum alignment we can assume.
2344 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2345 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2349 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2350 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2351 void *constfundata, unsigned int align, int endp)
2353 struct store_by_pieces data;
2362 if (! STORE_BY_PIECES_P (len, align))
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2369 store_by_pieces_1 (&data, align);
2380 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2383 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2386 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2393 to1 = adjust_address (data.to, QImode, data.offset);
2401 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
2406 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2408 struct store_by_pieces data;
2413 data.constfun = clear_by_pieces_1;
2414 data.constfundata = NULL;
2417 store_by_pieces_1 (&data, align);
2420 /* Callback routine for clear_by_pieces.
2421 Return const0_rtx unconditionally. */
2424 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2425 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2426 enum machine_mode mode ATTRIBUTE_UNUSED)
2431 /* Subroutine of clear_by_pieces and store_by_pieces.
2432 Generate several move instructions to store LEN bytes of block TO. (A MEM
2433 rtx with BLKmode). The caller must pass TO through protect_from_queue
2434 before calling. ALIGN is maximum alignment we can assume. */
2437 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2438 unsigned int align ATTRIBUTE_UNUSED)
2440 rtx to_addr = XEXP (data->to, 0);
2441 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2442 enum machine_mode mode = VOIDmode, tmode;
2443 enum insn_code icode;
2446 data->to_addr = to_addr;
2448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2451 data->explicit_inc_to = 0;
2453 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2455 data->offset = data->len;
2457 /* If storing requires more than two move insns,
2458 copy addresses to registers (to make displacements shorter)
2459 and use post-increment if available. */
2460 if (!data->autinc_to
2461 && move_by_pieces_ninsns (data->len, align) > 2)
2463 /* Determine the main mode we'll be using. */
2464 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2465 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2466 if (GET_MODE_SIZE (tmode) < max_size)
2469 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2471 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = -1;
2476 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2477 && ! data->autinc_to)
2479 data->to_addr = copy_addr_to_reg (to_addr);
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = 1;
2484 if (!data->autinc_to && CONSTANT_P (to_addr))
2485 data->to_addr = copy_addr_to_reg (to_addr);
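/* For example, a reverse store on a pre-decrement target leaves TO_ADDR
   equal to TO + LEN with EXPLICIT_INC_TO == -1, so store_by_pieces_2
   decrements the address register before each piece is written.  */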
2488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2490 align = MOVE_MAX * BITS_PER_UNIT;
2492 /* First store what we can in the largest integer mode, then go to
2493 successively smaller modes. */
2495 while (max_size > 1)
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2502 if (mode == VOIDmode)
2505 icode = mov_optab->handlers[(int) mode].insn_code;
2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2509 max_size = GET_MODE_SIZE (mode);
2512 /* The code above should have handled everything. */
2517 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2522 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
2525 unsigned int size = GET_MODE_SIZE (mode);
2528 while (data->len >= size)
2531 data->offset -= size;
2533 if (data->autinc_to)
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2537 to1 = adjust_address (data->to, mode, data->offset);
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2549 if (! data->reverse)
2550 data->offset += size;
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
2560 clear_storage (rtx object, rtx size)
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2577 if (size == const0_rtx)
2579 else if (GET_CODE (size) == CONST_INT
2580 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2582 else if (clear_storage_via_clrstr (object, size, align))
2585 retval = clear_storage_via_libcall (object, size);
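/* A minimal usage sketch, assuming a 64-byte BLKmode stack temporary:  */
#if 0
  rtx mem = assign_stack_temp (BLKmode, 64, 0);
  clear_storage (mem, GEN_INT (64));
#endif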
2591 /* A subroutine of clear_storage. Expand a clrstr pattern;
2592 return true if successful. */
2595 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
2601 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2602 enum machine_mode mode;
2604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2605 mode = GET_MODE_WIDER_MODE (mode))
2607 enum insn_code code = clrstr_optab[(int) mode];
2608 insn_operand_predicate_fn pred;
2610 if (code != CODE_FOR_nothing
2611 /* We don't need MODE to be narrower than
2612 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2613 the mode mask, as it is returned by the macro, it will
2614 definitely be less than the actual mode mask. */
2615 && ((GET_CODE (size) == CONST_INT
2616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2617 <= (GET_MODE_MASK (mode) >> 1)))
2618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2619 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2620 || (*pred) (object, BLKmode))
2621 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2622 || (*pred) (opalign, VOIDmode)))
2625 rtx last = get_last_insn ();
2628 op1 = convert_to_mode (mode, size, 1);
2629 pred = insn_data[(int) code].operand[1].predicate;
2630 if (pred != 0 && ! (*pred) (op1, mode))
2631 op1 = copy_to_mode_reg (mode, op1);
2633 pat = GEN_FCN ((int) code) (object, op1, opalign);
2640 delete_insns_since (last);
2647 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2648 Return the return value of memset, 0 otherwise. */
2651 clear_storage_via_libcall (rtx object, rtx size)
2653 tree call_expr, arg_list, fn, object_tree, size_tree;
2654 enum machine_mode size_mode;
2657 /* OBJECT or SIZE may have been passed through protect_from_queue.
2659 It is unsafe to save the value generated by protect_from_queue
2660 and reuse it later. Consider what happens if emit_queue is
2661 called before the return value from protect_from_queue is used.
2663 Expansion of the CALL_EXPR below will call emit_queue before
2664 we are finished emitting RTL for argument setup. So if we are
2665 not careful we could get the wrong value for an argument.
2667 To avoid this problem we go ahead and emit code to copy OBJECT
2668 and SIZE into new pseudos. We can then place those new pseudos
2669 into an RTL_EXPR and use them later, even after a call to emit_queue.
2672 Note this is not strictly needed for library calls since they
2673 do not call emit_queue before loading their arguments. However,
2674 we may need to have library calls call emit_queue in the future
2675 since failing to do so could cause problems for targets which
2676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2678 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2680 if (TARGET_MEM_FUNCTIONS)
2681 size_mode = TYPE_MODE (sizetype);
2683 size_mode = TYPE_MODE (unsigned_type_node);
2684 size = convert_to_mode (size_mode, size, 1);
2685 size = copy_to_mode_reg (size_mode, size);
2687 /* It is incorrect to use the libcall calling conventions to call
2688 memset in this context. This could be a user call to memset and
2689 the user may wish to examine the return value from memset. For
2690 targets where libcalls and normal calls have different conventions
2691 for returning pointers, we could end up generating incorrect code.
2693 For convenience, we generate the call to bzero this way as well. */
2695 object_tree = make_tree (ptr_type_node, object);
2696 if (TARGET_MEM_FUNCTIONS)
2697 size_tree = make_tree (sizetype, size);
2699 size_tree = make_tree (unsigned_type_node, size);
2701 fn = clear_storage_libcall_fn (true);
2702 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2703 if (TARGET_MEM_FUNCTIONS)
2704 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2705 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2709 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2710 call_expr, arg_list, NULL_TREE);
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2714 /* If we are initializing a readonly value, show the above call
2715 clobbered it. Otherwise, a load from it may erroneously be
2716 hoisted from a loop. */
2717 if (RTX_UNCHANGING_P (object))
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2720 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
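/* At the source level the call built above corresponds to either
     memset (object, 0, size)   when TARGET_MEM_FUNCTIONS
     bzero (object, size)       otherwise
   which is why only the memset form yields a meaningful return value.  */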
2723 /* A subroutine of clear_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2727 static GTY(()) tree block_clear_fn;
2730 init_block_clear_fn (const char *asmspec)
2732 if (!block_clear_fn)
2736 if (TARGET_MEM_FUNCTIONS)
2738 fn = get_identifier ("memset");
2739 args = build_function_type_list (ptr_type_node, ptr_type_node,
2740 integer_type_node, sizetype,
2745 fn = get_identifier ("bzero");
2746 args = build_function_type_list (void_type_node, ptr_type_node,
2747 unsigned_type_node, NULL_TREE);
2750 fn = build_decl (FUNCTION_DECL, fn, args);
2751 DECL_EXTERNAL (fn) = 1;
2752 TREE_PUBLIC (fn) = 1;
2753 DECL_ARTIFICIAL (fn) = 1;
2754 TREE_NOTHROW (fn) = 1;
2756 block_clear_fn = fn;
2761 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2762 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2767 clear_storage_libcall_fn (int for_call)
2769 static bool emitted_extern;
2771 if (!block_clear_fn)
2772 init_block_clear_fn (NULL);
2774 if (for_call && !emitted_extern)
2776 emitted_extern = true;
2777 make_decl_rtl (block_clear_fn, NULL);
2778 assemble_external (block_clear_fn);
2781 return block_clear_fn;
2784 /* Generate code to copy Y into X.
2785 Both Y and X must have the same mode, except that
2786 Y can be a constant with VOIDmode.
2787 This mode cannot be BLKmode; use emit_block_move for that.
2789 Return the last instruction emitted. */
2792 emit_move_insn (rtx x, rtx y)
2794 enum machine_mode mode = GET_MODE (x);
2795 rtx y_cst = NULL_RTX;
2798 x = protect_from_queue (x, 1);
2799 y = protect_from_queue (y, 0);
2801 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2807 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2808 && (last_insn = compress_float_constant (x, y)))
2813 if (!LEGITIMATE_CONSTANT_P (y))
2815 y = force_const_mem (mode, y);
2817 /* If the target's cannot_force_const_mem prevented the spill,
2818 assume that the target's move expanders will also take care
2819 of the non-legitimate constant. */
2825 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2827 if (GET_CODE (x) == MEM
2828 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2829 && ! push_operand (x, GET_MODE (x)))
2831 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2832 x = validize_mem (x);
2834 if (GET_CODE (y) == MEM
2835 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2838 y = validize_mem (y);
2840 if (mode == BLKmode)
2843 last_insn = emit_move_insn_1 (x, y);
2845 if (y_cst && REG_P (x)
2846 && (set = single_set (last_insn)) != NULL_RTX
2847 && SET_DEST (set) == x
2848 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2849 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
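/* A minimal usage sketch: the common pattern of loading a constant into
   a fresh pseudo.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif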
2854 /* Low level part of emit_move_insn.
2855 Called just like emit_move_insn, but assumes X and Y
2856 are basically valid. */
2859 emit_move_insn_1 (rtx x, rtx y)
2861 enum machine_mode mode = GET_MODE (x);
2862 enum machine_mode submode;
2863 enum mode_class class = GET_MODE_CLASS (mode);
2865 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2868 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2870 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2872 /* Expand complex moves by moving real part and imag part, if possible. */
2873 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2874 && BLKmode != (submode = GET_MODE_INNER (mode))
2875 && (mov_optab->handlers[(int) submode].insn_code
2876 != CODE_FOR_nothing))
2878 /* Don't split destination if it is a stack push. */
2879 int stack = push_operand (x, GET_MODE (x));
2881 #ifdef PUSH_ROUNDING
2882 /* In case we output to the stack, but the size is smaller than the
2883 machine can push exactly, we need to use move instructions. */
2885 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2886 != GET_MODE_SIZE (submode)))
2889 HOST_WIDE_INT offset1, offset2;
2891 /* Do not use anti_adjust_stack, since we don't want to update
2892 stack_pointer_delta. */
2893 temp = expand_binop (Pmode,
2894 #ifdef STACK_GROWS_DOWNWARD
2902 (GET_MODE_SIZE (GET_MODE (x)))),
2903 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2905 if (temp != stack_pointer_rtx)
2906 emit_move_insn (stack_pointer_rtx, temp);
2908 #ifdef STACK_GROWS_DOWNWARD
2910 offset2 = GET_MODE_SIZE (submode);
2912 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2913 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2914 + GET_MODE_SIZE (submode));
2917 emit_move_insn (change_address (x, submode,
2918 gen_rtx_PLUS (Pmode,
2920 GEN_INT (offset1))),
2921 gen_realpart (submode, y));
2922 emit_move_insn (change_address (x, submode,
2923 gen_rtx_PLUS (Pmode,
2925 GEN_INT (offset2))),
2926 gen_imagpart (submode, y));
2930 /* If this is a stack push, push the highpart first, so it
2931 will be in the argument order.
2933 In that case, change_address is used only to convert
2934 the mode, not to change the address. */
2937 /* Note that the real part always precedes the imag part in memory
2938 regardless of machine's endianness. */
2939 #ifdef STACK_GROWS_DOWNWARD
2940 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2941 gen_imagpart (submode, y));
2942 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2943 gen_realpart (submode, y));
2945 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2946 gen_realpart (submode, y));
2947 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2948 gen_imagpart (submode, y));
2953 rtx realpart_x, realpart_y;
2954 rtx imagpart_x, imagpart_y;
2956 /* If this is a complex value with each part being smaller than a
2957 word, the usual calling sequence will likely pack the pieces into
2958 a single register. Unfortunately, SUBREG of hard registers only
2959 deals in terms of words, so we have a problem converting input
2960 arguments to the CONCAT of two registers that is used elsewhere
2961 for complex values. If this is before reload, we can copy it into
2962 memory and reload. FIXME, we should see about using extract and
2963 insert on integer registers, but complex short and complex char
2964 variables should be rarely used. */
2965 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2966 && (reload_in_progress | reload_completed) == 0)
2969 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2971 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2973 if (packed_dest_p || packed_src_p)
2975 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2976 ? MODE_FLOAT : MODE_INT);
2978 enum machine_mode reg_mode
2979 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2981 if (reg_mode != BLKmode)
2983 rtx mem = assign_stack_temp (reg_mode,
2984 GET_MODE_SIZE (mode), 0);
2985 rtx cmem = adjust_address (mem, mode, 0);
2989 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2991 emit_move_insn_1 (cmem, y);
2992 return emit_move_insn_1 (sreg, mem);
2996 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2998 emit_move_insn_1 (mem, sreg);
2999 return emit_move_insn_1 (x, cmem);
3005 realpart_x = gen_realpart (submode, x);
3006 realpart_y = gen_realpart (submode, y);
3007 imagpart_x = gen_imagpart (submode, x);
3008 imagpart_y = gen_imagpart (submode, y);
3010 /* Show the output dies here. This is necessary for SUBREGs
3011 of pseudos since we cannot track their lifetimes correctly;
3012 hard regs shouldn't appear here except as return values.
3013 We never want to emit such a clobber after reload. */
3015 && ! (reload_in_progress || reload_completed)
3016 && (GET_CODE (realpart_x) == SUBREG
3017 || GET_CODE (imagpart_x) == SUBREG))
3018 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3020 emit_move_insn (realpart_x, realpart_y);
3021 emit_move_insn (imagpart_x, imagpart_y);
3024 return get_last_insn ();
3027 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3028 find a mode to do it in. If we have a movcc, use it. Otherwise,
3029 find the MODE_INT mode of the same width. */
3030 else if (GET_MODE_CLASS (mode) == MODE_CC
3031 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3033 enum insn_code insn_code;
3034 enum machine_mode tmode = VOIDmode;
3038 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3041 for (tmode = QImode; tmode != VOIDmode;
3042 tmode = GET_MODE_WIDER_MODE (tmode))
3043 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3046 if (tmode == VOIDmode)
3049 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3050 may call change_address which is not appropriate if we were
3051 called when a reload was in progress. We don't have to worry
3052 about changing the address since the size in bytes is supposed to
3053 be the same. Copy the MEM to change the mode and move any
3054 substitutions from the old MEM to the new one. */
3056 if (reload_in_progress)
3058 x = gen_lowpart_common (tmode, x1);
3059 if (x == 0 && GET_CODE (x1) == MEM)
3061 x = adjust_address_nv (x1, tmode, 0);
3062 copy_replacements (x1, x);
3065 y = gen_lowpart_common (tmode, y1);
3066 if (y == 0 && GET_CODE (y1) == MEM)
3068 y = adjust_address_nv (y1, tmode, 0);
3069 copy_replacements (y1, y);
3074 x = gen_lowpart (tmode, x);
3075 y = gen_lowpart (tmode, y);
3078 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3079 return emit_insn (GEN_FCN (insn_code) (x, y));
3082 /* Try using a move pattern for the corresponding integer mode. This is
3083 only safe when simplify_subreg can convert MODE constants into integer
3084 constants. At present, it can only do this reliably if the value
3085 fits within a HOST_WIDE_INT. */
3086 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3087 && (submode = int_mode_for_mode (mode)) != BLKmode
3088 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3089 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3090 (simplify_gen_subreg (submode, x, mode, 0),
3091 simplify_gen_subreg (submode, y, mode, 0)));
3093 /* This will handle any multi-word or full-word mode that lacks a move_insn
3094 pattern. However, you will get better code if you define such patterns,
3095 even if they must turn into multiple assembler instructions. */
3096 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3103 #ifdef PUSH_ROUNDING
3105 /* If X is a push on the stack, do the push now and replace
3106 X with a reference to the stack pointer. */
3107 if (push_operand (x, GET_MODE (x)))
3112 /* Do not use anti_adjust_stack, since we don't want to update
3113 stack_pointer_delta. */
3114 temp = expand_binop (Pmode,
3115 #ifdef STACK_GROWS_DOWNWARD
3123 (GET_MODE_SIZE (GET_MODE (x)))),
3124 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3126 if (temp != stack_pointer_rtx)
3127 emit_move_insn (stack_pointer_rtx, temp);
3129 code = GET_CODE (XEXP (x, 0));
3131 /* Just hope that small offsets off SP are OK. */
3132 if (code == POST_INC)
3133 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3134 GEN_INT (-((HOST_WIDE_INT)
3135 GET_MODE_SIZE (GET_MODE (x)))));
3136 else if (code == POST_DEC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3140 temp = stack_pointer_rtx;
3142 x = change_address (x, VOIDmode, temp);
3146 /* If we are in reload, see if either operand is a MEM whose address
3147 is scheduled for replacement. */
3148 if (reload_in_progress && GET_CODE (x) == MEM
3149 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3150 x = replace_equiv_address_nv (x, inner);
3151 if (reload_in_progress && GET_CODE (y) == MEM
3152 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3153 y = replace_equiv_address_nv (y, inner);
3159 for (i = 0; i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD; i++)
3162 rtx xpart = operand_subword (x, i, 1, mode);
3163 rtx ypart = operand_subword (y, i, 1, mode);
3165 /* If we can't get a part of Y, put Y into memory if it is a
3166 constant. Otherwise, force it into a register. If we still
3167 can't get a part of Y, abort. */
3168 if (ypart == 0 && CONSTANT_P (y))
3170 y = force_const_mem (mode, y);
3171 ypart = operand_subword (y, i, 1, mode);
3173 else if (ypart == 0)
3174 ypart = operand_subword_force (y, i, mode);
3176 if (xpart == 0 || ypart == 0)
3179 need_clobber |= (GET_CODE (xpart) == SUBREG);
3181 last_insn = emit_move_insn (xpart, ypart);
3187 /* Show the output dies here. This is necessary for SUBREGs
3188 of pseudos since we cannot track their lifetimes correctly;
3189 hard regs shouldn't appear here except as return values.
3190 We never want to emit such a clobber after reload. */
3192 && ! (reload_in_progress || reload_completed)
3193 && need_clobber != 0)
3194 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3204 /* If Y is representable exactly in a narrower mode, and the target can
3205 perform the extension directly from constant or memory, then emit the
3206 move as an extension. */
3209 compress_float_constant (rtx x, rtx y)
3211 enum machine_mode dstmode = GET_MODE (x);
3212 enum machine_mode orig_srcmode = GET_MODE (y);
3213 enum machine_mode srcmode;
3216 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3218 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3219 srcmode != orig_srcmode;
3220 srcmode = GET_MODE_WIDER_MODE (srcmode))
3223 rtx trunc_y, last_insn;
3225 /* Skip if the target can't extend this way. */
3226 ic = can_extend_p (dstmode, srcmode, 0);
3227 if (ic == CODE_FOR_nothing)
3230 /* Skip if the narrowed value isn't exact. */
3231 if (! exact_real_truncate (srcmode, &r))
3234 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3236 if (LEGITIMATE_CONSTANT_P (trunc_y))
3238 /* Skip if the target needs extra instructions to perform the extension. */
3240 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3243 else if (float_extend_from_mem[dstmode][srcmode])
3244 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3248 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3249 last_insn = get_last_insn ();
3252 set_unique_reg_note (last_insn, REG_EQUAL, y);
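/* Worked example, for illustration: a DFmode constant 1.5 truncates
   exactly to SFmode, so on a target with an extendsfdf2 pattern the move
   becomes an SFmode constant load plus a float extension.  A constant
   such as 0.1 fails exact_real_truncate and is skipped.  */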
3260 /* Pushing data onto the stack. */
3262 /* Push a block of length SIZE (perhaps variable)
3263 and return an rtx to address the beginning of the block.
3264 Note that it is not possible for the value returned to be a QUEUED.
3265 The value may be virtual_outgoing_args_rtx.
3267 EXTRA is the number of bytes of padding to push in addition to SIZE.
3268 BELOW nonzero means this padding comes at low addresses;
3269 otherwise, the padding comes at high addresses. */
3272 push_block (rtx size, int extra, int below)
3276 size = convert_modes (Pmode, ptr_mode, size, 1);
3277 if (CONSTANT_P (size))
3278 anti_adjust_stack (plus_constant (size, extra));
3279 else if (REG_P (size) && extra == 0)
3280 anti_adjust_stack (size);
3283 temp = copy_to_mode_reg (Pmode, size);
3285 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3286 temp, 0, OPTAB_LIB_WIDEN);
3287 anti_adjust_stack (temp);
3290 #ifndef STACK_GROWS_DOWNWARD
3296 temp = virtual_outgoing_args_rtx;
3297 if (extra != 0 && below)
3298 temp = plus_constant (temp, extra);
3302 if (GET_CODE (size) == CONST_INT)
3303 temp = plus_constant (virtual_outgoing_args_rtx,
3304 -INTVAL (size) - (below ? 0 : extra));
3305 else if (extra != 0 && !below)
3306 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3307 negate_rtx (Pmode, plus_constant (size, extra)));
3309 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3310 negate_rtx (Pmode, size));
3313 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
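/* A minimal usage sketch, assuming a 32-byte block with no extra padding:  */
#if 0
  rtx block_addr = push_block (GEN_INT (32), 0, 0);
#endif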
3316 #ifdef PUSH_ROUNDING
3318 /* Emit single push insn. */
3321 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3324 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3326 enum insn_code icode;
3327 insn_operand_predicate_fn pred;
3329 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3330 /* If there is a push pattern, use it. Otherwise try the old way of
3331 throwing a MEM representing the push operation to the move expander. */
3332 icode = push_optab->handlers[(int) mode].insn_code;
3333 if (icode != CODE_FOR_nothing)
3335 if (((pred = insn_data[(int) icode].operand[0].predicate)
3336 && !((*pred) (x, mode))))
3337 x = force_reg (mode, x);
3338 emit_insn (GEN_FCN (icode) (x));
3341 if (GET_MODE_SIZE (mode) == rounded_size)
3342 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3343 /* If we are to pad downward, adjust the stack pointer first and
3344 then store X into the stack location using an offset. This is
3345 because emit_move_insn does not know how to pad; it does not have access to type. */
3347 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3349 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3350 HOST_WIDE_INT offset;
3352 emit_move_insn (stack_pointer_rtx,
3353 expand_binop (Pmode,
3354 #ifdef STACK_GROWS_DOWNWARD
3360 GEN_INT (rounded_size),
3361 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3363 offset = (HOST_WIDE_INT) padding_size;
3364 #ifdef STACK_GROWS_DOWNWARD
3365 if (STACK_PUSH_CODE == POST_DEC)
3366 /* We have already decremented the stack pointer, so get the previous value. */
3368 offset += (HOST_WIDE_INT) rounded_size;
3370 if (STACK_PUSH_CODE == POST_INC)
3371 /* We have already incremented the stack pointer, so get the previous value. */
3373 offset -= (HOST_WIDE_INT) rounded_size;
3375 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3379 #ifdef STACK_GROWS_DOWNWARD
3380 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3382 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (rounded_size));
3388 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3391 dest = gen_rtx_MEM (mode, dest_addr);
3395 set_mem_attributes (dest, type, 1);
3397 if (flag_optimize_sibling_calls)
3398 /* Function incoming arguments may overlap with sibling call
3399 outgoing arguments and we cannot allow reordering of reads
3400 from function arguments with stores to outgoing arguments
3401 of sibling calls. */
3402 set_mem_alias_set (dest, 0);
3404 emit_move_insn (dest, x);
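/* Worked arithmetic, for illustration: if PUSH_ROUNDING rounds to 4
   bytes, an HImode push has ROUNDED_SIZE == 4 and PADDING_SIZE == 2;
   with downward padding on a downward-growing stack (and a PRE-style
   STACK_PUSH_CODE) the value lands at SP + 2 after the adjustment.  */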
3408 /* Generate code to push X onto the stack, assuming it has mode MODE and
3410 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3412 SIZE is an rtx for the size of data to be copied (in bytes),
3413 needed only if X is BLKmode.
3415 ALIGN (in bits) is maximum alignment we can assume.
3417 If PARTIAL and REG are both nonzero, then copy that many of the first
3418 words of X into registers starting with REG, and push the rest of X.
3419 The amount of space pushed is decreased by PARTIAL words,
3420 rounded *down* to a multiple of PARM_BOUNDARY.
3421 REG must be a hard register in this case.
3422 If REG is zero but PARTIAL is not, take all other actions for an
3423 argument partially in registers, but do not actually load any registers.
3426 EXTRA is the amount in bytes of extra space to leave next to this arg.
3427 This is ignored if an argument block has already been allocated.
3429 On a machine that lacks real push insns, ARGS_ADDR is the address of
3430 the bottom of the argument block for this call. We use indexing off there
3431 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3432 argument block has not been preallocated.
3434 ARGS_SO_FAR is the size of args previously pushed for this call.
3436 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3437 for arguments passed in registers. If nonzero, it will be the number
3438 of bytes required. */
3441 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3442 unsigned int align, int partial, rtx reg, int extra,
3443 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3447 enum direction stack_direction
3448 #ifdef STACK_GROWS_DOWNWARD
3454 /* Decide where to pad the argument: `downward' for below,
3455 `upward' for above, or `none' for don't pad it.
3456 Default is below for small data on big-endian machines; else above. */
3457 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3459 /* Invert direction if stack is post-decrement.
3461 if (STACK_PUSH_CODE == POST_DEC)
3462 if (where_pad != none)
3463 where_pad = (where_pad == downward ? upward : downward);
3465 xinner = x = protect_from_queue (x, 0);
3467 if (mode == BLKmode)
3469 /* Copy a block into the stack, entirely or partially. */
3472 int used = partial * UNITS_PER_WORD;
3476 if (reg && GET_CODE (reg) == PARALLEL)
3478 /* Use the size of the elt to compute offset. */
3479 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3480 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3481 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3484 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3491 /* USED is now the # of bytes we need not copy to the stack
3492 because registers will take care of them. */
3495 xinner = adjust_address (xinner, BLKmode, used);
3497 /* If the partial register-part of the arg counts in its stack size,
3498 skip the part of stack space corresponding to the registers.
3499 Otherwise, start copying to the beginning of the stack space,
3500 by setting SKIP to 0. */
3501 skip = (reg_parm_stack_space == 0) ? 0 : used;
3503 #ifdef PUSH_ROUNDING
3504 /* Do it with several push insns if that doesn't take lots of insns
3505 and if there is no difficulty with push insns that skip bytes
3506 on the stack for alignment purposes. */
3509 && GET_CODE (size) == CONST_INT
3511 && MEM_ALIGN (xinner) >= align
3512 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3513 /* Here we avoid the case of a structure whose weak alignment
3514 forces many pushes of a small amount of data,
3515 and such small pushes do rounding that causes trouble. */
3516 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3517 || align >= BIGGEST_ALIGNMENT
3518 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3519 == (align / BITS_PER_UNIT)))
3520 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3522 /* Push padding now if padding above and stack grows down,
3523 or if padding below and stack grows up.
3524 But if space already allocated, this has already been done. */
3525 if (extra && args_addr == 0
3526 && where_pad != none && where_pad != stack_direction)
3527 anti_adjust_stack (GEN_INT (extra));
3529 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3532 #endif /* PUSH_ROUNDING */
3536 /* Otherwise make space on the stack and copy the data
3537 to the address of that space. */
3539 /* Deduct words put into registers from the size we must copy. */
3542 if (GET_CODE (size) == CONST_INT)
3543 size = GEN_INT (INTVAL (size) - used);
3545 size = expand_binop (GET_MODE (size), sub_optab, size,
3546 GEN_INT (used), NULL_RTX, 0,
3550 /* Get the address of the stack space.
3551 In this case, we do not deal with EXTRA separately.
3552 A single stack adjust will do. */
3555 temp = push_block (size, extra, where_pad == downward);
3558 else if (GET_CODE (args_so_far) == CONST_INT)
3559 temp = memory_address (BLKmode,
3560 plus_constant (args_addr,
3561 skip + INTVAL (args_so_far)));
3563 temp = memory_address (BLKmode,
3564 plus_constant (gen_rtx_PLUS (Pmode,
3569 if (!ACCUMULATE_OUTGOING_ARGS)
3571 /* If the source is referenced relative to the stack pointer,
3572 copy it to another register to stabilize it. We do not need
3573 to do this if we know that we won't be changing sp. */
3575 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3576 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3577 temp = copy_to_reg (temp);
3580 target = gen_rtx_MEM (BLKmode, temp);
3584 set_mem_attributes (target, type, 1);
3585 /* Function incoming arguments may overlap with sibling call
3586 outgoing arguments and we cannot allow reordering of reads
3587 from function arguments with stores to outgoing arguments
3588 of sibling calls. */
3589 set_mem_alias_set (target, 0);
3592 /* ALIGN may well be better aligned than TYPE, e.g. due to
3593 PARM_BOUNDARY. Assume the caller isn't lying. */
3594 set_mem_align (target, align);
3596 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3599 else if (partial > 0)
3601 /* Scalar partly in registers. */
3603 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3606 /* # words of start of argument
3607 that we must make space for but need not store. */
3608 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3609 int args_offset = INTVAL (args_so_far);
3612 /* Push padding now if padding above and stack grows down,
3613 or if padding below and stack grows up.
3614 But if space already allocated, this has already been done. */
3615 if (extra && args_addr == 0
3616 && where_pad != none && where_pad != stack_direction)
3617 anti_adjust_stack (GEN_INT (extra));
3619 /* If we make space by pushing it, we might as well push
3620 the real data. Otherwise, we can leave OFFSET nonzero
3621 and leave the space uninitialized. */
3625 /* Now NOT_STACK gets the number of words that we don't need to
3626 allocate on the stack. */
3627 not_stack = partial - offset;
3629 /* If the partial register-part of the arg counts in its stack size,
3630 skip the part of stack space corresponding to the registers.
3631 Otherwise, start copying to the beginning of the stack space,
3632 by setting SKIP to 0. */
3633 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3635 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3636 x = validize_mem (force_const_mem (mode, x));
3638 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3639 SUBREGs of such registers are not allowed. */
3640 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3641 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3642 x = copy_to_reg (x);
3644 /* Loop over all the words allocated on the stack for this arg. */
3645 /* We can do it by words, because any scalar bigger than a word
3646 has a size a multiple of a word. */
3647 #ifndef PUSH_ARGS_REVERSED
3648 for (i = not_stack; i < size; i++)
3650 for (i = size - 1; i >= not_stack; i--)
3652 if (i >= not_stack + offset)
3653 emit_push_insn (operand_subword_force (x, i, mode),
3654 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3656 GEN_INT (args_offset + ((i - not_stack + skip)
3658 reg_parm_stack_space, alignment_pad);
3665 /* Push padding now if padding above and stack grows down,
3666 or if padding below and stack grows up.
3667 But if space already allocated, this has already been done. */
3668 if (extra && args_addr == 0
3669 && where_pad != none && where_pad != stack_direction)
3670 anti_adjust_stack (GEN_INT (extra));
3672 #ifdef PUSH_ROUNDING
3673 if (args_addr == 0 && PUSH_ARGS)
3674 emit_single_push_insn (mode, x, type);
3678 if (GET_CODE (args_so_far) == CONST_INT)
3680 = memory_address (mode,
3681 plus_constant (args_addr,
3682 INTVAL (args_so_far)));
3684 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3686 dest = gen_rtx_MEM (mode, addr);
3689 set_mem_attributes (dest, type, 1);
3690 /* Function incoming arguments may overlap with sibling call
3691 outgoing arguments and we cannot allow reordering of reads
3692 from function arguments with stores to outgoing arguments
3693 of sibling calls. */
3694 set_mem_alias_set (dest, 0);
3697 emit_move_insn (dest, x);
3701 /* If part should go in registers, copy that part
3702 into the appropriate registers. Do this now, at the end,
3703 since mem-to-mem copies above may do function calls. */
3704 if (partial > 0 && reg != 0)
3706 /* Handle calls that pass values in multiple non-contiguous locations.
3707 The Irix 6 ABI has examples of this. */
3708 if (GET_CODE (reg) == PARALLEL)
3709 emit_group_load (reg, x, type, -1);
3711 move_block_to_reg (REGNO (reg), x, partial, mode);
3714 if (extra && args_addr == 0 && where_pad == stack_direction)
3715 anti_adjust_stack (GEN_INT (extra));
3717 if (alignment_pad && args_addr == 0)
3718 anti_adjust_stack (alignment_pad);
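/* A minimal call sketch, with hypothetical operands: push a SImode value
   X entirely on the stack, with no partial-register part, no
   preallocated argument block, and no alignment padding.  */
#if 0
  emit_push_insn (x, SImode, integer_type_node, NULL_RTX,
		  GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx, 0, NULL_RTX);
#endif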
3721 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.
3725 get_subtarget (rtx x)
3728 /* Only registers can be subtargets. */
3730 /* If the register is readonly, it can't be set more than once. */
3731 || RTX_UNCHANGING_P (x)
3732 /* Don't use hard regs to avoid extending their life. */
3733 || REGNO (x) < FIRST_PSEUDO_REGISTER
3734 /* Avoid subtargets inside loops,
3735 since they hide some invariant expressions. */
3736 || preserve_subexpressions_p ())
3740 /* Expand an assignment that stores the value of FROM into TO.
3741 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3742 (This may contain a QUEUED rtx;
3743 if the value is constant, this rtx is a constant.)
3744 Otherwise, the returned value is NULL_RTX. */
3747 expand_assignment (tree to, tree from, int want_value)
3752 /* Don't crash if the lhs of the assignment was erroneous. */
3754 if (TREE_CODE (to) == ERROR_MARK)
3756 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3757 return want_value ? result : NULL_RTX;
3760 /* Assignment of a structure component needs special treatment
3761 if the structure component's rtx is not simply a MEM.
3762 Assignment of an array element at a constant index, and assignment of
3763 an array element in an unaligned packed structure field, have the same problem. */
3766 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3767 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3768 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3770 enum machine_mode mode1;
3771 HOST_WIDE_INT bitsize, bitpos;
3779 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3780 &unsignedp, &volatilep);
3782 /* If we are going to use store_bit_field and extract_bit_field,
3783 make sure to_rtx will be safe for multiple use. */
3785 if (mode1 == VOIDmode && want_value)
3786 tem = stabilize_reference (tem);
3788 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3792 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3794 if (GET_CODE (to_rtx) != MEM)
3797 #ifdef POINTERS_EXTEND_UNSIGNED
3798 if (GET_MODE (offset_rtx) != Pmode)
3799 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3801 if (GET_MODE (offset_rtx) != ptr_mode)
3802 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3805 /* A constant address in TO_RTX can have VOIDmode, we must not try
3806 to call force_reg for that case. Avoid that case. */
3807 if (GET_CODE (to_rtx) == MEM
3808 && GET_MODE (to_rtx) == BLKmode
3809 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3811 && (bitpos % bitsize) == 0
3812 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3813 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3815 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3819 to_rtx = offset_address (to_rtx, offset_rtx,
3820 highest_pow2_factor_for_target (to,
3824 if (GET_CODE (to_rtx) == MEM)
3826 /* If the field is at offset zero, we could have been given the
3827 DECL_RTX of the parent struct. Don't munge it. */
3828 to_rtx = shallow_copy_rtx (to_rtx);
3830 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3833 /* Deal with volatile and readonly fields. The former is only done
3834 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3835 if (volatilep && GET_CODE (to_rtx) == MEM)
3837 if (to_rtx == orig_to_rtx)
3838 to_rtx = copy_rtx (to_rtx);
3839 MEM_VOLATILE_P (to_rtx) = 1;
3842 if (TREE_CODE (to) == COMPONENT_REF
3843 && TREE_READONLY (TREE_OPERAND (to, 1))
3844 /* We can't assert that a MEM won't be set more than once
3845 if the component is not addressable because another
3846 non-addressable component may be referenced by the same MEM. */
3847 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3849 if (to_rtx == orig_to_rtx)
3850 to_rtx = copy_rtx (to_rtx);
3851 RTX_UNCHANGING_P (to_rtx) = 1;
3854 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3856 if (to_rtx == orig_to_rtx)
3857 to_rtx = copy_rtx (to_rtx);
3858 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3861 if (mode1 == VOIDmode && !want_value
3862 && bitpos + bitsize <= BITS_PER_WORD
3863 && bitsize < BITS_PER_WORD
3864 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3865 && !TREE_SIDE_EFFECTS (to)
3866 && TREE_CODE (TREE_TYPE (from)) == INTEGER_TYPE
3867 && TREE_CODE_CLASS (TREE_CODE (from)) == '2'
3868 && operand_equal_p (to, TREE_OPERAND (from, 0), 0))
3871 HOST_WIDE_INT count = bitpos;
3873 if (BYTES_BIG_ENDIAN)
3874 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3876 /* Special case some bitfield op= exp. */
3877 switch (TREE_CODE (from))
3884 /* For now, just optimize the case of the topmost bitfield
3885 where we don't need to do any masking.
3886 We might win by one instruction for the other bitfields
3887 too if insv/extv instructions aren't used, so that
3888 can be added later. */
3889 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3891 value = expand_expr (TREE_OPERAND (from, 1), NULL_RTX,
3893 value = protect_from_queue (value, 0);
3894 to_rtx = protect_from_queue (to_rtx, 1);
3895 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3896 value, build_int_2 (count, 0),
3898 result = expand_binop (GET_MODE (to_rtx),
3899 TREE_CODE (from) == PLUS_EXPR
3900 ? add_optab : sub_optab, to_rtx,
3901 value, to_rtx, 1, OPTAB_WIDEN);
3902 if (result != to_rtx)
3903 emit_move_insn (to_rtx, result);
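/* Source-level example of the case handled above: for `s.f += n' where
   F is the topmost bitfield in its word, N is shifted into position and
   added directly to TO_RTX, avoiding an extract/mask/insert sequence.  */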
3912 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3914 /* Spurious cast for HPUX compiler. */
3915 ? ((enum machine_mode)
3916 TYPE_MODE (TREE_TYPE (to)))
3918 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3920 preserve_temp_slots (result);
3924 /* If the value is meaningful, convert RESULT to the proper mode.
3925 Otherwise, return nothing. */
3926 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3927 TYPE_MODE (TREE_TYPE (from)),
3929 TYPE_UNSIGNED (TREE_TYPE (to)))
3933 /* If the rhs is a function call and its value is not an aggregate,
3934 call the function before we start to compute the lhs.
3935 This is needed for correct code for cases such as
3936 val = setjmp (buf) on machines where reference to val
3937 requires loading up part of an address in a separate insn.
3939 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3940 since it might be a promoted variable where the zero- or sign- extension
3941 needs to be done. Handling this in the normal way is safe because no
3942 computation is done before the call. */
3943 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3944 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3945 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3946 && REG_P (DECL_RTL (to))))
3951 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3953 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3955 /* Handle calls that return values in multiple non-contiguous locations.
3956 The Irix 6 ABI has examples of this. */
3957 if (GET_CODE (to_rtx) == PARALLEL)
3958 emit_group_load (to_rtx, value, TREE_TYPE (from),
3959 int_size_in_bytes (TREE_TYPE (from)));
3960 else if (GET_MODE (to_rtx) == BLKmode)
3961 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3964 if (POINTER_TYPE_P (TREE_TYPE (to)))
3965 value = convert_memory_address (GET_MODE (to_rtx), value);
3966 emit_move_insn (to_rtx, value);
3968 preserve_temp_slots (to_rtx);
3971 return want_value ? to_rtx : NULL_RTX;
3974 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3975 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3978 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3980 /* Don't move directly into a return register. */
3981 if (TREE_CODE (to) == RESULT_DECL
3982 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3987 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3989 if (GET_CODE (to_rtx) == PARALLEL)
3990 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3991 int_size_in_bytes (TREE_TYPE (from)));
3993 emit_move_insn (to_rtx, temp);
3995 preserve_temp_slots (to_rtx);
3998 return want_value ? to_rtx : NULL_RTX;
4001 /* In case we are returning the contents of an object which overlaps
4002 the place the value is being stored, use a safe function when copying
4003 a value through a pointer into a structure value return block. */
4004 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4005 && current_function_returns_struct
4006 && !current_function_returns_pcc_struct)
4011 size = expr_size (from);
4012 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4014 if (TARGET_MEM_FUNCTIONS)
4015 emit_library_call (memmove_libfunc, LCT_NORMAL,
4016 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4017 XEXP (from_rtx, 0), Pmode,
4018 convert_to_mode (TYPE_MODE (sizetype),
4019 size, TYPE_UNSIGNED (sizetype)),
4020 TYPE_MODE (sizetype));
4022 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4023 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4024 XEXP (to_rtx, 0), Pmode,
4025 convert_to_mode (TYPE_MODE (integer_type_node),
4027 TYPE_UNSIGNED (integer_type_node)),
4028 TYPE_MODE (integer_type_node));
4030 preserve_temp_slots (to_rtx);
4033 return want_value ? to_rtx : NULL_RTX;
4036 /* Compute FROM and store the value in the rtx we got. */
4039 result = store_expr (from, to_rtx, want_value);
4040 preserve_temp_slots (result);
4043 return want_value ? result : NULL_RTX;
4046 /* Generate code for computing expression EXP,
4047 and storing the value into TARGET.
4048 TARGET may contain a QUEUED rtx.
4050 If WANT_VALUE & 1 is nonzero, return a copy of the value
4051 not in TARGET, so that we can be sure to use the proper
4052 value in a containing expression even if TARGET has something
4053 else stored in it. If possible, we copy the value through a pseudo
4054 and return that pseudo. Or, if the value is constant, we try to
4055 return the constant. In some cases, we return a pseudo
4056 copied *from* TARGET.
4058 If the mode is BLKmode then we may return TARGET itself.
4059 It turns out that in BLKmode it doesn't cause a problem,
4060 because C has no operators that could combine two different
4061 assignments into the same BLKmode object with different values
4062 with no sequence point. Will other languages need this to be more careful?
4065 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4066 to catch quickly any cases where the caller uses the value
4067 and fails to set WANT_VALUE.
4069 If WANT_VALUE & 2 is set, this is a store into a call param on the
4070 stack, and block moves may need to be treated specially. */
4073 store_expr (tree exp, rtx target, int want_value)
4076 rtx alt_rtl = NULL_RTX;
4077 rtx mark = mark_queue ();
4078 int dont_return_target = 0;
4079 int dont_store_target = 0;
4081 if (VOID_TYPE_P (TREE_TYPE (exp)))
4083 /* C++ can generate ?: expressions with a throw expression in one
4084 branch and an rvalue in the other. Here, we resolve attempts to
4085 store the throw expression's nonexistent result. */
4088 expand_expr (exp, const0_rtx, VOIDmode, 0);
4091 if (TREE_CODE (exp) == COMPOUND_EXPR)
4093 /* Perform first part of compound expression, then assign from second part. */
4095 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4096 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4098 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4100 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4102 /* For conditional expression, get safe form of the target. Then
4103 test the condition, doing the appropriate assignment on either
4104 side. This avoids the creation of unnecessary temporaries.
4105 For non-BLKmode, it is more efficient not to do this. */
4107 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4110 target = protect_from_queue (target, 1);
4112 do_pending_stack_adjust ();
4114 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4115 start_cleanup_deferral ();
4116 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4117 end_cleanup_deferral ();
4119 emit_jump_insn (gen_jump (lab2));
4122 start_cleanup_deferral ();
4123 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4124 end_cleanup_deferral ();
4129 return want_value & 1 ? target : NULL_RTX;
4131 else if (queued_subexp_p (target))
4132 /* If target contains a postincrement, let's not risk
4133 using it as the place to generate the rhs. */
4135 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4137 /* Expand EXP into a new pseudo. */
4138 temp = gen_reg_rtx (GET_MODE (target));
4139 temp = expand_expr (exp, temp, GET_MODE (target),
4141 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4144 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4146 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4148 /* If target is volatile, ANSI requires accessing the value
4149 *from* the target, if it is accessed. So make that happen.
4150 In no case return the target itself. */
4151 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4152 dont_return_target = 1;
4154 else if ((want_value & 1) != 0
4155 && GET_CODE (target) == MEM
4156 && ! MEM_VOLATILE_P (target)
4157 && GET_MODE (target) != BLKmode)
4158 /* If target is in memory and caller wants value in a register instead,
4159 arrange that. Pass TARGET as target for expand_expr so that,
4160 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4161 We know expand_expr will not use the target in that case.
4162 Don't do this if TARGET is volatile because we are supposed
4163 to write it and then read it. */
4165 temp = expand_expr (exp, target, GET_MODE (target),
4166 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4167 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4169 /* If TEMP is already in the desired TARGET, only copy it from
4170 memory and don't store it there again. */
4172 || (rtx_equal_p (temp, target)
4173 && ! side_effects_p (temp) && ! side_effects_p (target)))
4174 dont_store_target = 1;
4175 temp = copy_to_reg (temp);
4177 dont_return_target = 1;
4179 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4180 /* If this is a scalar in a register that is stored in a wider mode
4181 than the declared mode, compute the result into its declared mode
4182 and then convert to the wider mode. Our value is the computed expression. */
4185 rtx inner_target = 0;
4187 /* If we don't want a value, we can do the conversion inside EXP,
4188 which will often result in some optimizations. Do the conversion
4189 in two steps: first change the signedness, if needed, then
4190 the extend. But don't do this if the type of EXP is a subtype
4191 of something else since then the conversion might involve
4192 more than just converting modes. */
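/* A sketch of the two steps, assuming a hypothetical "short" EXP
   promoted to an unsigned SImode SUBREG_REG:

     exp = (unsigned short) exp;   -- fix the signedness first
     exp = (unsigned int) exp;     -- then widen to the wider mode  */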
4193 if ((want_value & 1) == 0
4194 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4195 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4197 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4198 != SUBREG_PROMOTED_UNSIGNED_P (target))
4200 (lang_hooks.types.signed_or_unsigned_type
4201 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4203 exp = convert (lang_hooks.types.type_for_mode
4204 (GET_MODE (SUBREG_REG (target)),
4205 SUBREG_PROMOTED_UNSIGNED_P (target)),
4208 inner_target = SUBREG_REG (target);
4211 temp = expand_expr (exp, inner_target, VOIDmode,
4212 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4214 /* If TEMP is a MEM and we want a result value, make the access
4215 now so it gets done only once. Strictly speaking, this is
4216 only necessary if the MEM is volatile, or if the address
4217 overlaps TARGET. But not performing the load twice also
4218 reduces the amount of rtl we generate and then have to CSE. */
4219 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4220 temp = copy_to_reg (temp);
4222 /* If TEMP is a VOIDmode constant, use convert_modes to make
4223 sure that we properly convert it. */
4224 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4226 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4227 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4228 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4229 GET_MODE (target), temp,
4230 SUBREG_PROMOTED_UNSIGNED_P (target));
4233 convert_move (SUBREG_REG (target), temp,
4234 SUBREG_PROMOTED_UNSIGNED_P (target));
4236 /* If we promoted a constant, change the mode back down to match
4237 target. Otherwise, the caller might get confused by a result whose
4238 mode is larger than expected. */
4240 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4242 if (GET_MODE (temp) != VOIDmode)
4244 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4245 SUBREG_PROMOTED_VAR_P (temp) = 1;
4246 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4247 SUBREG_PROMOTED_UNSIGNED_P (target));
4250 temp = convert_modes (GET_MODE (target),
4251 GET_MODE (SUBREG_REG (target)),
4252 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4255 return want_value & 1 ? temp : NULL_RTX;
4259 temp = expand_expr_real (exp, target, GET_MODE (target),
4261 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4263 /* Return TARGET if it's a specified hardware register.
4264 If TARGET is a volatile mem ref, either return TARGET
4265 or return a reg copied *from* TARGET; ANSI requires this.
4267 Otherwise, if TEMP is not TARGET, return TEMP
4268 if it is constant (for efficiency),
4269 or if we really want the correct value. */
4270 if (!(target && REG_P (target)
4271 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4272 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4273 && ! rtx_equal_p (temp, target)
4274 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4275 dont_return_target = 1;
4278 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4279 the same as that of TARGET, adjust the constant. This is needed, for
4280 example, in case it is a CONST_DOUBLE and we want only a word-sized value. */
4282 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4283 && TREE_CODE (exp) != ERROR_MARK
4284 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4285 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4286 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4288 /* If value was not generated in the target, store it there.
4289 Convert the value to TARGET's type first if necessary and emit the
4290 pending incrementations that have been queued when expanding EXP.
4291 Note that we cannot emit the whole queue blindly because this will
4292 effectively disable the POST_INC optimization later.
4294 If TEMP and TARGET compare equal according to rtx_equal_p, but
4295 one or both of them are volatile memory refs, we have to distinguish two cases:
4297 - expand_expr has used TARGET. In this case, we must not generate
4298 another copy. This can be detected by TARGET being equal according to ==.
4300 - expand_expr has not used TARGET - that means that the source just
4301 happens to have the same RTX form. Since temp will have been created
4302 by expand_expr, it will compare unequal according to == .
4303 We must generate a copy in this case, to reach the correct number
4304 of volatile memory references. */
4306 if ((! rtx_equal_p (temp, target)
4307 || (temp != target && (side_effects_p (temp)
4308 || side_effects_p (target))))
4309 && TREE_CODE (exp) != ERROR_MARK
4310 && ! dont_store_target
4311 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4312 but TARGET is not a valid memory reference, TEMP will differ
4313 from TARGET although it is really the same location. */
4314 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4315 /* If there's nothing to copy, don't bother. Don't call expr_size
4316 unless necessary, because the expr_size hook of some front ends
4317 (C++) aborts on objects that are not supposed to be bit-copied or copied by reference. */
4319 && expr_size (exp) != const0_rtx)
4321 emit_insns_enqueued_after_mark (mark);
4322 target = protect_from_queue (target, 1);
4323 temp = protect_from_queue (temp, 0);
4324 if (GET_MODE (temp) != GET_MODE (target)
4325 && GET_MODE (temp) != VOIDmode)
4327 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4328 if (dont_return_target)
4330 /* In this case, we will return TEMP,
4331 so make sure it has the proper mode.
4332 But don't forget to store the value into TARGET. */
4333 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4334 emit_move_insn (target, temp);
4337 convert_move (target, temp, unsignedp);
4340 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4342 /* Handle copying a string constant into an array. The string
4343 constant may be shorter than the array. So copy just the string's
4344 actual length, and clear the rest. First get the size of the data
4345 type of the string, which is actually the size of the target. */
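/* For example, for the hypothetical initialization

     char buf[8] = "hi";

   the STRING_CST supplies TREE_STRING_LENGTH == 3 bytes ("hi" plus
   the terminating NUL) while expr_size is 8, so we copy 3 bytes and
   clear the remaining 5 below.  */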
4346 rtx size = expr_size (exp);
4348 if (GET_CODE (size) == CONST_INT
4349 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4350 emit_block_move (target, temp, size,
4352 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4355 /* Compute the size of the data to copy from the string. */
4357 = size_binop (MIN_EXPR,
4358 make_tree (sizetype, size),
4359 size_int (TREE_STRING_LENGTH (exp)));
4361 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4363 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4366 /* Copy that much. */
4367 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4368 TYPE_UNSIGNED (sizetype));
4369 emit_block_move (target, temp, copy_size_rtx,
4371 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4373 /* Figure out how much is left in TARGET that we have to clear.
4374 Do all calculations in ptr_mode. */
4375 if (GET_CODE (copy_size_rtx) == CONST_INT)
4377 size = plus_constant (size, -INTVAL (copy_size_rtx));
4378 target = adjust_address (target, BLKmode,
4379 INTVAL (copy_size_rtx));
4383 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4384 copy_size_rtx, NULL_RTX, 0,
4387 #ifdef POINTERS_EXTEND_UNSIGNED
4388 if (GET_MODE (copy_size_rtx) != Pmode)
4389 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4390 TYPE_UNSIGNED (sizetype));
4393 target = offset_address (target, copy_size_rtx,
4394 highest_pow2_factor (copy_size));
4395 label = gen_label_rtx ();
4396 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4397 GET_MODE (size), 0, label);
4400 if (size != const0_rtx)
4401 clear_storage (target, size);
4407 /* Handle calls that return values in multiple non-contiguous locations.
4408 The Irix 6 ABI has examples of this. */
4409 else if (GET_CODE (target) == PARALLEL)
4410 emit_group_load (target, temp, TREE_TYPE (exp),
4411 int_size_in_bytes (TREE_TYPE (exp)));
4412 else if (GET_MODE (temp) == BLKmode)
4413 emit_block_move (target, temp, expr_size (exp),
4415 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4418 temp = force_operand (temp, target);
4420 emit_move_insn (target, temp);
4424 /* If we don't want a value, return NULL_RTX. */
4425 if ((want_value & 1) == 0)
4428 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4429 ??? The latter test doesn't seem to make sense. */
4430 else if (dont_return_target && GET_CODE (temp) != MEM)
4433 /* Return TARGET itself if it is a hard register. */
4434 else if ((want_value & 1) != 0
4435 && GET_MODE (target) != BLKmode
4436 && ! (REG_P (target)
4437 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4438 return copy_to_reg (target);
4444 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4445 values and place that count in *P_NZ_ELTS. Discover how many scalar fields
4446 are set to non-constant values and place that count in *P_NC_ELTS. */
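/* E.g. a hypothetical initializer { 1, 0, 2.0, n }, where N is not a
   constant, yields *P_NZ_ELTS == 3 (for 1, 2.0 and N) and
   *P_NC_ELTS == 1 (for N alone).  */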
4449 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4450 HOST_WIDE_INT *p_nc_elts)
4452 HOST_WIDE_INT nz_elts, nc_elts;
4458 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4460 tree value = TREE_VALUE (list);
4461 tree purpose = TREE_PURPOSE (list);
4465 if (TREE_CODE (purpose) == RANGE_EXPR)
4467 tree lo_index = TREE_OPERAND (purpose, 0);
4468 tree hi_index = TREE_OPERAND (purpose, 1);
4470 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4471 mult = (tree_low_cst (hi_index, 1)
4472 - tree_low_cst (lo_index, 1) + 1);
4475 switch (TREE_CODE (value))
4479 HOST_WIDE_INT nz = 0, nc = 0;
4480 categorize_ctor_elements_1 (value, &nz, &nc);
4481 nz_elts += mult * nz;
4482 nc_elts += mult * nc;
4488 if (!initializer_zerop (value))
4492 if (!initializer_zerop (TREE_REALPART (value)))
4494 if (!initializer_zerop (TREE_IMAGPART (value)))
4500 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4501 if (!initializer_zerop (TREE_VALUE (v)))
4508 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4514 *p_nz_elts += nz_elts;
4515 *p_nc_elts += nc_elts;
4519 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4520 HOST_WIDE_INT *p_nc_elts)
4524 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4527 /* Count the number of scalars in TYPE. Return -1 on overflow or if the count is variable. */
4531 count_type_elements (tree type)
4533 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4534 switch (TREE_CODE (type))
4538 tree telts = array_type_nelts (type);
4539 if (telts && host_integerp (telts, 1))
4541 HOST_WIDE_INT n = tree_low_cst (telts, 1);
4542 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4553 HOST_WIDE_INT n = 0, t;
4556 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4557 if (TREE_CODE (f) == FIELD_DECL)
4559 t = count_type_elements (TREE_TYPE (f));
4569 case QUAL_UNION_TYPE:
4571 /* Ho hum. How in the world do we guess here? Clearly it isn't
4572 right to count the fields. Guess based on the number of words. */
4573 HOST_WIDE_INT n = int_size_in_bytes (type);
4576 return n / UNITS_PER_WORD;
4583 /* ??? This is broken. We should encode the vector width in the tree. */
4584 return GET_MODE_NUNITS (TYPE_MODE (type));
4593 case REFERENCE_TYPE:
4607 /* Return 1 if EXP contains mostly (3/4) zeros. */
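/* For instance, a hypothetical 8-element initializer with a single
   nonzero entry gives nz_elts == 1 and elts == 8, and 1 < 8/4, so
   the constructor counts as mostly zeros.  */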
4610 mostly_zeros_p (tree exp)
4612 if (TREE_CODE (exp) == CONSTRUCTOR)
4615 HOST_WIDE_INT nz_elts, nc_elts, elts;
4617 /* If there are no ranges of true bits, it is all zero. */
4618 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4619 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4621 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4622 elts = count_type_elements (TREE_TYPE (exp));
4624 return nz_elts < elts / 4;
4627 return initializer_zerop (exp);
4630 /* Helper function for store_constructor.
4631 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4632 TYPE is the type of the CONSTRUCTOR, not the element type.
4633 CLEARED is as for store_constructor.
4634 ALIAS_SET is the alias set to use for any stores.
4636 This provides a recursive shortcut back to store_constructor when it isn't
4637 necessary to go through store_field. This is so that we can pass through
4638 the cleared field to let store_constructor know that we may not have to
4639 clear a substructure if the outer structure has already been cleared. */
4642 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4643 HOST_WIDE_INT bitpos, enum machine_mode mode,
4644 tree exp, tree type, int cleared, int alias_set)
4646 if (TREE_CODE (exp) == CONSTRUCTOR
4647 /* We can only call store_constructor recursively if the size and
4648 bit position are on a byte boundary. */
4649 && bitpos % BITS_PER_UNIT == 0
4650 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4651 /* If we have a nonzero bitpos for a register target, then we just
4652 let store_field do the bitfield handling. This is unlikely to
4653 generate unnecessary clear instructions anyway. */
4654 && (bitpos == 0 || GET_CODE (target) == MEM))
4656 if (GET_CODE (target) == MEM)
4658 = adjust_address (target,
4659 GET_MODE (target) == BLKmode
4661 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4662 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4665 /* Update the alias set, if required. */
4666 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4667 && MEM_ALIAS_SET (target) != 0)
4669 target = copy_rtx (target);
4670 set_mem_alias_set (target, alias_set);
4673 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4676 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4680 /* Store the value of constructor EXP into the rtx TARGET.
4681 TARGET is either a REG or a MEM; we know it cannot conflict, since
4682 safe_from_p has been called.
4683 CLEARED is true if TARGET is known to have been zeroed.
4684 SIZE is the number of bytes of TARGET we are allowed to modify: this
4685 may not be the same as the size of EXP if we are assigning to a field
4686 which has been packed to exclude padding bits. */
4689 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4691 tree type = TREE_TYPE (exp);
4692 #ifdef WORD_REGISTER_OPERATIONS
4693 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4696 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4697 || TREE_CODE (type) == QUAL_UNION_TYPE)
4701 /* If size is zero or the target is already cleared, do nothing. */
4702 if (size == 0 || cleared)
4704 /* We either clear the aggregate or indicate the value is dead. */
4705 else if ((TREE_CODE (type) == UNION_TYPE
4706 || TREE_CODE (type) == QUAL_UNION_TYPE)
4707 && ! CONSTRUCTOR_ELTS (exp))
4708 /* If the constructor is empty, clear the union. */
4710 clear_storage (target, expr_size (exp));
4714 /* If we are building a static constructor into a register,
4715 set the initial value to zero so we can fold the value into
4716 a constant. But if more than one register is involved,
4717 this probably loses. */
4718 else if (REG_P (target) && TREE_STATIC (exp)
4719 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4721 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4725 /* If the constructor has fewer fields than the structure
4726 or if we are initializing the structure to mostly zeros,
4727 clear the whole structure first. Don't do this if TARGET is a
4728 register whose mode size isn't equal to SIZE since clear_storage
4729 can't handle this case. */
4731 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4732 || mostly_zeros_p (exp))
4734 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4737 rtx xtarget = target;
4739 if (readonly_fields_p (type))
4741 xtarget = copy_rtx (xtarget);
4742 RTX_UNCHANGING_P (xtarget) = 1;
4745 clear_storage (xtarget, GEN_INT (size));
4750 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4752 /* Store each element of the constructor into
4753 the corresponding field of TARGET. */
4755 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4757 tree field = TREE_PURPOSE (elt);
4758 tree value = TREE_VALUE (elt);
4759 enum machine_mode mode;
4760 HOST_WIDE_INT bitsize;
4761 HOST_WIDE_INT bitpos = 0;
4763 rtx to_rtx = target;
4765 /* Just ignore missing fields.
4766 We cleared the whole structure, above,
4767 if any fields are missing. */
4771 if (cleared && initializer_zerop (value))
4774 if (host_integerp (DECL_SIZE (field), 1))
4775 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4779 mode = DECL_MODE (field);
4780 if (DECL_BIT_FIELD (field))
4783 offset = DECL_FIELD_OFFSET (field);
4784 if (host_integerp (offset, 0)
4785 && host_integerp (bit_position (field), 0))
4787 bitpos = int_bit_position (field);
4791 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4798 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4799 make_tree (TREE_TYPE (exp),
4802 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4803 if (GET_CODE (to_rtx) != MEM)
4806 #ifdef POINTERS_EXTEND_UNSIGNED
4807 if (GET_MODE (offset_rtx) != Pmode)
4808 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4810 if (GET_MODE (offset_rtx) != ptr_mode)
4811 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4814 to_rtx = offset_address (to_rtx, offset_rtx,
4815 highest_pow2_factor (offset));
4818 if (TREE_READONLY (field))
4820 if (GET_CODE (to_rtx) == MEM)
4821 to_rtx = copy_rtx (to_rtx);
4823 RTX_UNCHANGING_P (to_rtx) = 1;
4826 #ifdef WORD_REGISTER_OPERATIONS
4827 /* If this initializes a field that is smaller than a word, at the
4828 start of a word, try to widen it to a full word.
4829 This special case allows us to output C++ member function
4830 initializations in a form that the optimizers can understand. */
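/* E.g. (hypothetical) storing the INTEGER_CST 3 into a 16-bit field
   at bit 0 of a 32-bit word becomes a full-word store of 3, or of
   3 << 16 on a big-endian target, instead of a bit-field insertion.  */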
4832 && bitsize < BITS_PER_WORD
4833 && bitpos % BITS_PER_WORD == 0
4834 && GET_MODE_CLASS (mode) == MODE_INT
4835 && TREE_CODE (value) == INTEGER_CST
4837 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4839 tree type = TREE_TYPE (value);
4841 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4843 type = lang_hooks.types.type_for_size
4844 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4845 value = convert (type, value);
4848 if (BYTES_BIG_ENDIAN)
4850 = fold (build (LSHIFT_EXPR, type, value,
4851 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4852 bitsize = BITS_PER_WORD;
4857 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4858 && DECL_NONADDRESSABLE_P (field))
4860 to_rtx = copy_rtx (to_rtx);
4861 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4864 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4865 value, type, cleared,
4866 get_alias_set (TREE_TYPE (field)));
4869 else if (TREE_CODE (type) == ARRAY_TYPE
4870 || TREE_CODE (type) == VECTOR_TYPE)
4876 tree elttype = TREE_TYPE (type);
4878 HOST_WIDE_INT minelt = 0;
4879 HOST_WIDE_INT maxelt = 0;
4883 unsigned n_elts = 0;
4885 if (TREE_CODE (type) == ARRAY_TYPE)
4886 domain = TYPE_DOMAIN (type);
4888 /* Vectors do not have domains; look up the domain of
4889 the array embedded in the debug representation type.
4890 FIXME Would probably be more efficient to treat vectors
4891 separately from arrays. */
4893 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4894 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4895 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4897 enum machine_mode mode = GET_MODE (target);
4899 icode = (int) vec_init_optab->handlers[mode].insn_code;
4900 if (icode != CODE_FOR_nothing)
4904 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4905 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4906 vector = alloca (n_elts);
4907 for (i = 0; i < n_elts; i++)
4908 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4913 const_bounds_p = (TYPE_MIN_VALUE (domain)
4914 && TYPE_MAX_VALUE (domain)
4915 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4916 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4918 /* If we have constant bounds for the range of the type, get them. */
4921 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4922 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4925 /* If the constructor has fewer elements than the array,
4926 clear the whole array first. Similarly if this is
4927 a static constructor of a non-BLKmode object. */
4928 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4932 HOST_WIDE_INT count = 0, zero_count = 0;
4933 need_to_clear = ! const_bounds_p;
4935 /* This loop is a more accurate version of the loop in
4936 mostly_zeros_p (it handles RANGE_EXPR in an index).
4937 It is also needed to check for missing elements. */
4938 for (elt = CONSTRUCTOR_ELTS (exp);
4939 elt != NULL_TREE && ! need_to_clear;
4940 elt = TREE_CHAIN (elt))
4942 tree index = TREE_PURPOSE (elt);
4943 HOST_WIDE_INT this_node_count;
4945 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4947 tree lo_index = TREE_OPERAND (index, 0);
4948 tree hi_index = TREE_OPERAND (index, 1);
4950 if (! host_integerp (lo_index, 1)
4951 || ! host_integerp (hi_index, 1))
4957 this_node_count = (tree_low_cst (hi_index, 1)
4958 - tree_low_cst (lo_index, 1) + 1);
4961 this_node_count = 1;
4963 count += this_node_count;
4964 if (mostly_zeros_p (TREE_VALUE (elt)))
4965 zero_count += this_node_count;
4968 /* Clear the entire array first if there are any missing elements,
4969 or if the incidence of zero elements is >= 75%. */
4971 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4975 if (need_to_clear && size > 0 && !vector)
4980 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4982 clear_storage (target, GEN_INT (size));
4986 else if (REG_P (target))
4987 /* Inform later passes that the old value is dead. */
4988 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4990 /* Store each element of the constructor into
4991 the corresponding element of TARGET, determined
4992 by counting the elements. */
4993 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4995 elt = TREE_CHAIN (elt), i++)
4997 enum machine_mode mode;
4998 HOST_WIDE_INT bitsize;
4999 HOST_WIDE_INT bitpos;
5001 tree value = TREE_VALUE (elt);
5002 tree index = TREE_PURPOSE (elt);
5003 rtx xtarget = target;
5005 if (cleared && initializer_zerop (value))
5008 unsignedp = TYPE_UNSIGNED (elttype);
5009 mode = TYPE_MODE (elttype);
5010 if (mode == BLKmode)
5011 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5012 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5015 bitsize = GET_MODE_BITSIZE (mode);
5017 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5019 tree lo_index = TREE_OPERAND (index, 0);
5020 tree hi_index = TREE_OPERAND (index, 1);
5021 rtx index_r, pos_rtx;
5022 HOST_WIDE_INT lo, hi, count;
5028 /* If the range is constant and "small", unroll the loop. */
5030 && host_integerp (lo_index, 0)
5031 && host_integerp (hi_index, 0)
5032 && (lo = tree_low_cst (lo_index, 0),
5033 hi = tree_low_cst (hi_index, 0),
5034 count = hi - lo + 1,
5035 (GET_CODE (target) != MEM
5037 || (host_integerp (TYPE_SIZE (elttype), 1)
5038 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5041 lo -= minelt; hi -= minelt;
5042 for (; lo <= hi; lo++)
5044 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5046 if (GET_CODE (target) == MEM
5047 && !MEM_KEEP_ALIAS_SET_P (target)
5048 && TREE_CODE (type) == ARRAY_TYPE
5049 && TYPE_NONALIASED_COMPONENT (type))
5051 target = copy_rtx (target);
5052 MEM_KEEP_ALIAS_SET_P (target) = 1;
5055 store_constructor_field
5056 (target, bitsize, bitpos, mode, value, type, cleared,
5057 get_alias_set (elttype));
5062 rtx loop_start = gen_label_rtx ();
5063 rtx loop_end = gen_label_rtx ();
5066 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5067 unsignedp = TYPE_UNSIGNED (domain);
5069 index = build_decl (VAR_DECL, NULL_TREE, domain);
5072 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5074 SET_DECL_RTL (index, index_r);
5075 if (TREE_CODE (value) == SAVE_EXPR
5076 && SAVE_EXPR_RTL (value) == 0)
5078 /* Make sure value gets expanded once before the loop. */
5080 expand_expr (value, const0_rtx, VOIDmode, 0);
5083 store_expr (lo_index, index_r, 0);
5085 /* Build the head of the loop. */
5086 do_pending_stack_adjust ();
5088 emit_label (loop_start);
5090 /* Assign value to element index. */
5092 = convert (ssizetype,
5093 fold (build (MINUS_EXPR, TREE_TYPE (index),
5094 index, TYPE_MIN_VALUE (domain))));
5095 position = size_binop (MULT_EXPR, position,
5097 TYPE_SIZE_UNIT (elttype)));
5099 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5100 xtarget = offset_address (target, pos_rtx,
5101 highest_pow2_factor (position));
5102 xtarget = adjust_address (xtarget, mode, 0);
5103 if (TREE_CODE (value) == CONSTRUCTOR)
5104 store_constructor (value, xtarget, cleared,
5105 bitsize / BITS_PER_UNIT);
5107 store_expr (value, xtarget, 0);
5109 /* Generate a conditional jump to exit the loop. */
5110 exit_cond = build (LT_EXPR, integer_type_node,
5112 jumpif (exit_cond, loop_end);
5114 /* Update the loop counter, and jump to the head of the loop. */
5116 expand_increment (build (PREINCREMENT_EXPR,
5118 index, integer_one_node), 0, 0);
5119 emit_jump (loop_start);
5121 /* Build the end of the loop. */
5122 emit_label (loop_end);
5125 else if ((index != 0 && ! host_integerp (index, 0))
5126 || ! host_integerp (TYPE_SIZE (elttype), 1))
5134 index = ssize_int (1);
5137 index = convert (ssizetype,
5138 fold (build (MINUS_EXPR, index,
5139 TYPE_MIN_VALUE (domain))));
5141 position = size_binop (MULT_EXPR, index,
5143 TYPE_SIZE_UNIT (elttype)));
5144 xtarget = offset_address (target,
5145 expand_expr (position, 0, VOIDmode, 0),
5146 highest_pow2_factor (position));
5147 xtarget = adjust_address (xtarget, mode, 0);
5148 store_expr (value, xtarget, 0);
5155 pos = tree_low_cst (index, 0) - minelt;
5158 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5163 bitpos = ((tree_low_cst (index, 0) - minelt)
5164 * tree_low_cst (TYPE_SIZE (elttype), 1));
5166 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5168 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5169 && TREE_CODE (type) == ARRAY_TYPE
5170 && TYPE_NONALIASED_COMPONENT (type))
5172 target = copy_rtx (target);
5173 MEM_KEEP_ALIAS_SET_P (target) = 1;
5175 store_constructor_field (target, bitsize, bitpos, mode, value,
5176 type, cleared, get_alias_set (elttype));
5181 emit_insn (GEN_FCN (icode) (target,
5182 gen_rtx_PARALLEL (GET_MODE (target),
5183 gen_rtvec_v (n_elts, vector))));
5187 /* Set constructor assignments. */
5188 else if (TREE_CODE (type) == SET_TYPE)
5190 tree elt = CONSTRUCTOR_ELTS (exp);
5191 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5192 tree domain = TYPE_DOMAIN (type);
5193 tree domain_min, domain_max, bitlength;
5195 /* The default implementation strategy is to extract the constant
5196 parts of the constructor, use that to initialize the target,
5197 and then "or" in whatever non-constant ranges we need in addition.
5199 If a large set is all zero or all ones, it is
5200 probably better to set it using memset (if available) or bzero.
5201 Also, if a large set has just a single range, it may also be
5202 better to first clear the whole set (using
5203 bzero/memset), and then set the bits we want. */
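/* For example (hypothetical Pascal-style source):

     s := [1, 3, lo..hi];

   The constant members 1 and 3 are assembled into a constant bit
   pattern and stored directly; the variable range LO..HI is then
   "or"ed in through the library calls below.  */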
5205 /* Check for all zeros. */
5206 if (elt == NULL_TREE && size > 0)
5209 clear_storage (target, GEN_INT (size));
5213 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5214 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5215 bitlength = size_binop (PLUS_EXPR,
5216 size_diffop (domain_max, domain_min),
5219 nbits = tree_low_cst (bitlength, 1);
5221 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5222 are "complicated" (more than one range), initialize (the
5223 constant parts) by copying from a constant. */
5224 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5225 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5227 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5228 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5229 char *bit_buffer = alloca (nbits);
5230 HOST_WIDE_INT word = 0;
5231 unsigned int bit_pos = 0;
5232 unsigned int ibit = 0;
5233 unsigned int offset = 0; /* In bytes from beginning of set. */
5235 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5238 if (bit_buffer[ibit])
5240 if (BYTES_BIG_ENDIAN)
5241 word |= (1 << (set_word_size - 1 - bit_pos));
5243 word |= 1 << bit_pos;
5247 if (bit_pos >= set_word_size || ibit == nbits)
5249 if (word != 0 || ! cleared)
5251 rtx datum = gen_int_mode (word, mode);
5254 /* The assumption here is that it is safe to use
5255 XEXP if the set is multi-word, but not if
5256 it's single-word. */
5257 if (GET_CODE (target) == MEM)
5258 to_rtx = adjust_address (target, mode, offset);
5259 else if (offset == 0)
5263 emit_move_insn (to_rtx, datum);
5270 offset += set_word_size / BITS_PER_UNIT;
5275 /* Don't bother clearing storage if the set is all ones. */
5276 if (TREE_CHAIN (elt) != NULL_TREE
5277 || (TREE_PURPOSE (elt) == NULL_TREE
5279 : ( ! host_integerp (TREE_VALUE (elt), 0)
5280 || ! host_integerp (TREE_PURPOSE (elt), 0)
5281 || (tree_low_cst (TREE_VALUE (elt), 0)
5282 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5283 != (HOST_WIDE_INT) nbits))))
5284 clear_storage (target, expr_size (exp));
5286 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5288 /* Start of range of element or NULL. */
5289 tree startbit = TREE_PURPOSE (elt);
5290 /* End of range of element, or element value. */
5291 tree endbit = TREE_VALUE (elt);
5292 HOST_WIDE_INT startb, endb;
5293 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5295 bitlength_rtx = expand_expr (bitlength,
5296 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5298 /* Handle non-range tuple element like [ expr ]. */
5299 if (startbit == NULL_TREE)
5301 startbit = save_expr (endbit);
5305 startbit = convert (sizetype, startbit);
5306 endbit = convert (sizetype, endbit);
5307 if (! integer_zerop (domain_min))
5309 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5310 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5312 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5313 EXPAND_CONST_ADDRESS);
5314 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5315 EXPAND_CONST_ADDRESS);
5321 ((build_qualified_type (lang_hooks.types.type_for_mode
5322 (GET_MODE (target), 0),
5325 emit_move_insn (targetx, target);
5328 else if (GET_CODE (target) == MEM)
5333 /* Optimization: If startbit and endbit are constants divisible
5334 by BITS_PER_UNIT, call memset instead. */
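/* E.g. (hypothetical) with BITS_PER_UNIT == 8, a constant range
   covering bits 8..23 gives startb == 8 and endb == 24, so we
   memset (endb - startb) / 8 == 2 bytes at byte offset 1.  */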
5335 if (TARGET_MEM_FUNCTIONS
5336 && TREE_CODE (startbit) == INTEGER_CST
5337 && TREE_CODE (endbit) == INTEGER_CST
5338 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5339 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5341 emit_library_call (memset_libfunc, LCT_NORMAL,
5343 plus_constant (XEXP (targetx, 0),
5344 startb / BITS_PER_UNIT),
5346 constm1_rtx, TYPE_MODE (integer_type_node),
5347 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5348 TYPE_MODE (sizetype));
5351 emit_library_call (setbits_libfunc, LCT_NORMAL,
5352 VOIDmode, 4, XEXP (targetx, 0),
5353 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5354 startbit_rtx, TYPE_MODE (sizetype),
5355 endbit_rtx, TYPE_MODE (sizetype));
5358 emit_move_insn (target, targetx);
5366 /* Store the value of EXP (an expression tree)
5367 into a subfield of TARGET which has mode MODE and occupies
5368 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5369 If MODE is VOIDmode, it means that we are storing into a bit-field.
5371 If VALUE_MODE is VOIDmode, return nothing in particular.
5372 UNSIGNEDP is not used in this case.
5374 Otherwise, return an rtx for the value stored. This rtx
5375 has mode VALUE_MODE if that is convenient to do.
5376 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5378 TYPE is the type of the underlying object.
5380 ALIAS_SET is the alias set for the destination. This value will
5381 (in general) be different from that for TARGET, since TARGET is a
5382 reference to the containing structure. */
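/* Sketch of a hypothetical call, storing V into a 3-bit field B at
   bit 5 of structure S:

     store_field (s_rtx, 3, 5, VOIDmode, v_exp, VOIDmode, 0,
                  TREE_TYPE (s_exp), alias_set);

   MODE is VOIDmode because the store is a bit-field insertion.  */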
5385 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5386 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5387 int unsignedp, tree type, int alias_set)
5389 HOST_WIDE_INT width_mask = 0;
5391 if (TREE_CODE (exp) == ERROR_MARK)
5394 /* If we have nothing to store, do nothing unless the expression has side effects. */
5397 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5398 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5399 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5401 /* If we are storing into an unaligned field of an aligned union that is
5402 in a register, we may have the mode of TARGET being an integer mode but
5403 MODE == BLKmode. In that case, get an aligned object whose size and
5404 alignment are the same as TARGET and store TARGET into it (we can avoid
5405 the store if the field being stored is the entire width of TARGET). Then
5406 call ourselves recursively to store the field into a BLKmode version of
5407 that object. Finally, load from the object into TARGET. This is not
5408 very efficient in general, but should only be slightly more expensive
5409 than the otherwise-required unaligned accesses. Perhaps this can be
5410 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5411 twice, once with emit_move_insn and once via store_field. */
5414 && (REG_P (target) || GET_CODE (target) == SUBREG))
5416 rtx object = assign_temp (type, 0, 1, 1);
5417 rtx blk_object = adjust_address (object, BLKmode, 0);
5419 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5420 emit_move_insn (object, target);
5422 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5425 emit_move_insn (target, object);
5427 /* We want to return the BLKmode version of the data. */
5431 if (GET_CODE (target) == CONCAT)
5433 /* We're storing into a struct containing a single __complex. */
5437 return store_expr (exp, target, value_mode != VOIDmode);
5440 /* If the structure is in a register or if the component
5441 is a bit field, we cannot use addressing to access it.
5442 Use bit-field techniques or SUBREG to store in it. */
5444 if (mode == VOIDmode
5445 || (mode != BLKmode && ! direct_store[(int) mode]
5446 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5447 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5449 || GET_CODE (target) == SUBREG
5450 /* If the field isn't aligned enough to store as an ordinary memref,
5451 store it as a bit field. */
5453 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5454 || bitpos % GET_MODE_ALIGNMENT (mode))
5455 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5456 || (bitpos % BITS_PER_UNIT != 0)))
5457 /* If the RHS and field are a constant size and the size of the
5458 RHS isn't the same size as the bitfield, we must use bitfield
5461 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5462 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5464 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5466 /* If BITSIZE is narrower than the size of the type of EXP
5467 we will be narrowing TEMP. Normally, what's wanted are the
5468 low-order bits. However, if EXP's type is a record and this is a
5469 big-endian machine, we want the upper BITSIZE bits. */
5470 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5471 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5472 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5473 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5474 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5478 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to MODE. */
5480 if (mode != VOIDmode && mode != BLKmode
5481 && mode != TYPE_MODE (TREE_TYPE (exp)))
5482 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5484 /* If the modes of TARGET and TEMP are both BLKmode, both
5485 must be in memory and BITPOS must be aligned on a byte
5486 boundary. If so, we simply do a block copy. */
5487 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5489 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5490 || bitpos % BITS_PER_UNIT != 0)
5493 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5494 emit_block_move (target, temp,
5495 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5499 return value_mode == VOIDmode ? const0_rtx : target;
5502 /* Store the value in the bitfield. */
5503 store_bit_field (target, bitsize, bitpos, mode, temp,
5504 int_size_in_bytes (type));
5506 if (value_mode != VOIDmode)
5508 /* The caller wants an rtx for the value.
5509 If possible, avoid refetching from the bitfield itself. */
5511 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5514 enum machine_mode tmode;
5516 tmode = GET_MODE (temp);
5517 if (tmode == VOIDmode)
5521 return expand_and (tmode, temp,
5522 gen_int_mode (width_mask, tmode),
5525 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5526 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5527 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5530 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5531 NULL_RTX, value_mode, VOIDmode,
5532 int_size_in_bytes (type));
5538 rtx addr = XEXP (target, 0);
5539 rtx to_rtx = target;
5541 /* If a value is wanted, it must be the lhs;
5542 so make the address stable for multiple use. */
5544 if (value_mode != VOIDmode && !REG_P (addr)
5545 && ! CONSTANT_ADDRESS_P (addr)
5546 /* A frame-pointer reference is already stable. */
5547 && ! (GET_CODE (addr) == PLUS
5548 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5549 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5550 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5551 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5553 /* Now build a reference to just the desired component. */
5555 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5557 if (to_rtx == target)
5558 to_rtx = copy_rtx (to_rtx);
5560 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5561 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5562 set_mem_alias_set (to_rtx, alias_set);
5564 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5568 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5569 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5570 codes and find the ultimate containing object, which we return.
5572 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5573 bit position, and *PUNSIGNEDP to the signedness of the field.
5574 If the position of the field is variable, we store a tree
5575 giving the variable offset (in units) in *POFFSET.
5576 This offset is in addition to the bit position.
5577 If the position is not variable, we store 0 in *POFFSET.
5579 If any of the extraction expressions is volatile,
5580 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5582 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5583 is a mode that can be used to access the field. In that case, *PBITSIZE is redundant.
5586 If the field describes a variable-sized object, *PMODE is set to
5587 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5588 this case, but the address of the object can be found. */
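/* For instance, decomposing the hypothetical reference "s.a[i].b",
   where B is a 4-bit field at bit 3 of a 2-byte element and A sits at
   offset 0 of S, yields the containing object S, *PBITSIZE == 4,
   *PBITPOS == 3, *POFFSET the tree "i * 2", and *PMODE == VOIDmode
   since B is a bit-field.  */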
5591 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5592 HOST_WIDE_INT *pbitpos, tree *poffset,
5593 enum machine_mode *pmode, int *punsignedp,
5597 enum machine_mode mode = VOIDmode;
5598 tree offset = size_zero_node;
5599 tree bit_offset = bitsize_zero_node;
5602 /* First get the mode, signedness, and size. We do this from just the
5603 outermost expression. */
5604 if (TREE_CODE (exp) == COMPONENT_REF)
5606 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5607 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5608 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5610 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5612 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5614 size_tree = TREE_OPERAND (exp, 1);
5615 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5619 mode = TYPE_MODE (TREE_TYPE (exp));
5620 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5622 if (mode == BLKmode)
5623 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5625 *pbitsize = GET_MODE_BITSIZE (mode);
5630 if (! host_integerp (size_tree, 1))
5631 mode = BLKmode, *pbitsize = -1;
5633 *pbitsize = tree_low_cst (size_tree, 1);
5636 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5637 and find the ultimate containing object. */
5640 if (TREE_CODE (exp) == BIT_FIELD_REF)
5641 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5642 else if (TREE_CODE (exp) == COMPONENT_REF)
5644 tree field = TREE_OPERAND (exp, 1);
5645 tree this_offset = component_ref_field_offset (exp);
5647 /* If this field hasn't been filled in yet, don't go
5648 past it. This should only happen when folding expressions
5649 made during type construction. */
5650 if (this_offset == 0)
5653 offset = size_binop (PLUS_EXPR, offset, this_offset);
5654 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5655 DECL_FIELD_BIT_OFFSET (field));
5657 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5660 else if (TREE_CODE (exp) == ARRAY_REF
5661 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5663 tree index = TREE_OPERAND (exp, 1);
5664 tree low_bound = array_ref_low_bound (exp);
5665 tree unit_size = array_ref_element_size (exp);
5667 /* We assume all arrays have sizes that are a multiple of a byte.
5668 First subtract the lower bound, if any, in the type of the
5669 index, then convert to sizetype and multiply by the size of the element. */
5671 if (! integer_zerop (low_bound))
5672 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5675 offset = size_binop (PLUS_EXPR, offset,
5676 size_binop (MULT_EXPR,
5677 convert (sizetype, index),
5681 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5682 conversions that don't change the mode, and all view conversions
5683 except those that need to "step up" the alignment. */
5684 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5685 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5686 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5687 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5689 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5690 < BIGGEST_ALIGNMENT)
5691 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5692 || TYPE_ALIGN_OK (TREE_TYPE
5693 (TREE_OPERAND (exp, 0))))))
5694 && ! ((TREE_CODE (exp) == NOP_EXPR
5695 || TREE_CODE (exp) == CONVERT_EXPR)
5696 && (TYPE_MODE (TREE_TYPE (exp))
5697 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5700 /* If any reference in the chain is volatile, the effect is volatile. */
5701 if (TREE_THIS_VOLATILE (exp))
5704 exp = TREE_OPERAND (exp, 0);
5707 /* If OFFSET is constant, see if we can return the whole thing as a
5708 constant bit position. Otherwise, split it up. */
5709 if (host_integerp (offset, 0)
5710 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5712 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5713 && host_integerp (tem, 0))
5714 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5716 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5722 /* Return a tree of sizetype representing the size, in bytes, of the element
5723 of EXP, an ARRAY_REF. */
5726 array_ref_element_size (tree exp)
5728 tree aligned_size = TREE_OPERAND (exp, 3);
5729 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5731 /* If a size was specified in the ARRAY_REF, it's the size measured
5732 in alignment units of the element type. So multiply by that value. */
5734 return size_binop (MULT_EXPR, aligned_size,
5735 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5737 /* Otherwise, take the size from that of the element type. Substitute
5738 any PLACEHOLDER_EXPR that we have. */
5740 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5743 /* Return a tree representing the lower bound of the array mentioned in
5744 EXP, an ARRAY_REF. */
5747 array_ref_low_bound (tree exp)
5749 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5751 /* If a lower bound is specified in EXP, use it. */
5752 if (TREE_OPERAND (exp, 2))
5753 return TREE_OPERAND (exp, 2);
5755 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5756 substituting for a PLACEHOLDER_EXPR as needed. */
5757 if (domain_type && TYPE_MIN_VALUE (domain_type))
5758 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5760 /* Otherwise, return a zero of the appropriate type. */
5761 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5764 /* Return a tree representing the offset, in bytes, of the field referenced
5765 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5768 component_ref_field_offset (tree exp)
5770 tree aligned_offset = TREE_OPERAND (exp, 2);
5771 tree field = TREE_OPERAND (exp, 1);
5773 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5774 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that value. */
5777 return size_binop (MULT_EXPR, aligned_offset,
5778 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5780 /* Otherwise, take the offset from that of the field. Substitute
5781 any PLACEHOLDER_EXPR that we have. */
5783 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5786 /* Return 1 if T is an expression that get_inner_reference handles. */
5789 handled_component_p (tree t)
5791 switch (TREE_CODE (t))
5796 case ARRAY_RANGE_REF:
5797 case NON_LVALUE_EXPR:
5798 case VIEW_CONVERT_EXPR:
5801 /* ??? Sure they are handled, but get_inner_reference may return
5802 a different PBITSIZE, depending upon whether the expression is
5803 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5806 return (TYPE_MODE (TREE_TYPE (t))
5807 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5814 /* Given an rtx VALUE that may contain additions and multiplications, return
5815 an equivalent value that just refers to a register, memory, or constant.
5816 This is done by generating instructions to perform the arithmetic and
5817 returning a pseudo-register containing the value.
5819 The returned value may be a REG, SUBREG, MEM or constant. */
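/* E.g. forcing the hypothetical address computation

     (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61))

   emits a multiply (or shift) and an add, and returns the pseudo
   holding the sum.  */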
5822 force_operand (rtx value, rtx target)
5825 /* Use subtarget as the target for operand 0 of a binary operation. */
5826 rtx subtarget = get_subtarget (target);
5827 enum rtx_code code = GET_CODE (value);
5829 /* Check for a subreg applied to an expression produced by the loop optimizer. */
5831 && !REG_P (SUBREG_REG (value))
5832 && GET_CODE (SUBREG_REG (value)) != MEM)
5834 value = simplify_gen_subreg (GET_MODE (value),
5835 force_reg (GET_MODE (SUBREG_REG (value)),
5836 force_operand (SUBREG_REG (value),
5838 GET_MODE (SUBREG_REG (value)),
5839 SUBREG_BYTE (value));
5840 code = GET_CODE (value);
5843 /* Check for a PIC address load. */
5844 if ((code == PLUS || code == MINUS)
5845 && XEXP (value, 0) == pic_offset_table_rtx
5846 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5847 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5848 || GET_CODE (XEXP (value, 1)) == CONST))
5851 subtarget = gen_reg_rtx (GET_MODE (value));
5852 emit_move_insn (subtarget, value);
5856 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5859 target = gen_reg_rtx (GET_MODE (value));
5860 convert_move (target, force_operand (XEXP (value, 0), NULL),
5861 code == ZERO_EXTEND);
5865 if (ARITHMETIC_P (value))
5867 op2 = XEXP (value, 1);
5868 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5870 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5873 op2 = negate_rtx (GET_MODE (value), op2);
5876 /* Check for an addition with OP2 a constant integer and our first
5877 operand a PLUS of a virtual register and something else. In that
5878 case, we want to emit the sum of the virtual register and the
5879 constant first and then add the other value. This allows virtual
5880 register instantiation to simply modify the constant rather than
5881 creating another one around this addition. */
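/* Sketch: for the hypothetical rtx

     (plus:SI (plus:SI (reg:SI virtual-stack-vars) (reg:SI 60))
              (const_int 8))

   we first emit virtual-stack-vars + 8, which instantiation can fold
   into a single frame-pointer offset, and only then add (reg:SI 60).  */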
5882 if (code == PLUS && GET_CODE (op2) == CONST_INT
5883 && GET_CODE (XEXP (value, 0)) == PLUS
5884 && REG_P (XEXP (XEXP (value, 0), 0))
5885 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5886 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5888 rtx temp = expand_simple_binop (GET_MODE (value), code,
5889 XEXP (XEXP (value, 0), 0), op2,
5890 subtarget, 0, OPTAB_LIB_WIDEN);
5891 return expand_simple_binop (GET_MODE (value), code, temp,
5892 force_operand (XEXP (XEXP (value,
5894 target, 0, OPTAB_LIB_WIDEN);
5897 op1 = force_operand (XEXP (value, 0), subtarget);
5898 op2 = force_operand (op2, NULL_RTX);
5902 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5904 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5905 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5906 target, 1, OPTAB_LIB_WIDEN);
5908 return expand_divmod (0,
5909 FLOAT_MODE_P (GET_MODE (value))
5910 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5911 GET_MODE (value), op1, op2, target, 0);
5914 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5918 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5922 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5926 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5927 target, 0, OPTAB_LIB_WIDEN);
5930 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5931 target, 1, OPTAB_LIB_WIDEN);
5934 if (UNARY_P (value))
5936 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5937 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5940 #ifdef INSN_SCHEDULING
5941 /* On machines that have insn scheduling, we want all memory references to be
5942 explicit, so we need to deal with such paradoxical SUBREGs. */
5943 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5944 && (GET_MODE_SIZE (GET_MODE (value))
5945 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5947 = simplify_gen_subreg (GET_MODE (value),
5948 force_reg (GET_MODE (SUBREG_REG (value)),
5949 force_operand (SUBREG_REG (value),
5951 GET_MODE (SUBREG_REG (value)),
5952 SUBREG_BYTE (value));
5958 /* Subroutine of expand_expr: return nonzero iff there is no way that
5959 EXP can reference X, which is being modified. TOP_P is nonzero if this
5960 call is going to be used to determine whether we need a temporary
5961 for EXP, as opposed to a recursive call to this function.
5963 It is always safe for this routine to return zero since it merely
5964 searches for optimization opportunities. */
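/* E.g. when expanding the hypothetical assignment "a[i] = a[j] + 1",
   a return of 0 for the destination's rtx forces the right-hand side
   into a temporary instead of computing it directly into a[i].  */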
5967 safe_from_p (rtx x, tree exp, int top_p)
5971 static tree save_expr_list;
5974 /* If EXP has varying size, we MUST use a target since we currently
5975 have no way of allocating temporaries of variable size
5976 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5977 So we assume here that something at a higher level has prevented a
5978 clash. This is somewhat bogus, but the best we can do. Only
5979 do this when X is BLKmode and when we are at the top level. */
5980 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5981 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5982 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5983 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5984 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5986 && GET_MODE (x) == BLKmode)
5987 /* If X is in the outgoing argument area, it is always safe. */
5988 || (GET_CODE (x) == MEM
5989 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5990 || (GET_CODE (XEXP (x, 0)) == PLUS
5991 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5994 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5995 find the underlying pseudo. */
5996 if (GET_CODE (x) == SUBREG)
5999 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6003 /* A SAVE_EXPR might appear many times in the expression passed to the
6004 top-level safe_from_p call, and if it has a complex subexpression,
6005 examining it multiple times could result in a combinatorial explosion.
6006 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
6007 with optimization took about 28 minutes to compile -- even though it was
6008 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6009 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6010 we have processed. Note that the only test of top_p was above. */
6019 rtn = safe_from_p (x, exp, 0);
6021 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6022 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6027 /* Now look at our tree code and possibly recurse. */
6028 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6031 exp_rtl = DECL_RTL_IF_SET (exp);
6038 if (TREE_CODE (exp) == TREE_LIST)
6042 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6044 exp = TREE_CHAIN (exp);
6047 if (TREE_CODE (exp) != TREE_LIST)
6048 return safe_from_p (x, exp, 0);
6051 else if (TREE_CODE (exp) == ERROR_MARK)
6052 return 1; /* An already-visited SAVE_EXPR? */
6057 /* The only case we look at here is the DECL_INITIAL inside a DECL_EXPR. */
6059 return (TREE_CODE (exp) != DECL_EXPR
6060 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
6061 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
6062 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6066 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6071 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6075 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6076 the expression. If it is set, we conflict iff we are that rtx or
6077 both are in memory. Otherwise, we check all operands of the
6078 expression recursively. */
6080 switch (TREE_CODE (exp))
6083 /* If the operand is static or we are static, we can't conflict.
6084 Likewise if we don't conflict with the operand at all. */
6085 if (staticp (TREE_OPERAND (exp, 0))
6086 || TREE_STATIC (exp)
6087 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6090 /* Otherwise, the only way this can conflict is if we are taking
6091 the address of a DECL whose address is part of X, which is very rare. */
6093 exp = TREE_OPERAND (exp, 0);
6096 if (!DECL_RTL_SET_P (exp)
6097 || GET_CODE (DECL_RTL (exp)) != MEM)
6100 exp_rtl = XEXP (DECL_RTL (exp), 0);
6105 if (GET_CODE (x) == MEM
6106 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6107 get_alias_set (exp)))
6112 /* Assume that the call will clobber all hard registers and all of memory. */
6114 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6115 || GET_CODE (x) == MEM)
6120 /* If a sequence exists, we would have to scan every instruction
6121 in the sequence to see if it was safe. This is probably not worthwhile. */
6123 if (RTL_EXPR_SEQUENCE (exp))
6126 exp_rtl = RTL_EXPR_RTL (exp);
6129 case WITH_CLEANUP_EXPR:
6130 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6133 case CLEANUP_POINT_EXPR:
6134 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6137 exp_rtl = SAVE_EXPR_RTL (exp);
6141 /* If we've already scanned this, don't do it again. Otherwise,
6142 show we've scanned it and record it so the flag can be cleared once the top-level call finishes. */
6144 if (TREE_PRIVATE (exp))
6147 TREE_PRIVATE (exp) = 1;
6148 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6150 TREE_PRIVATE (exp) = 0;
6154 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6158 /* The only operand we look at is operand 1. The rest aren't
6159 part of the expression. */
6160 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6166 /* If we have an rtx, we do not need to scan our operands. */
6170 nops = first_rtl_op (TREE_CODE (exp));
6171 for (i = 0; i < nops; i++)
6172 if (TREE_OPERAND (exp, i) != 0
6173 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6174 return 0;
6176 /* If this is a language-specific tree code, it may require
6177 special handling. */
6178 if ((unsigned int) TREE_CODE (exp)
6179 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6180 && !lang_hooks.safe_from_p (x, exp))
6181 return 0;
6184 /* If we have an rtl, find any enclosed object. Then see if we conflict
6185 with it. */
6188 if (GET_CODE (exp_rtl) == SUBREG)
6189 {
6190 exp_rtl = SUBREG_REG (exp_rtl);
6191 if (REG_P (exp_rtl)
6192 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6193 return 0;
6194 }
6196 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6197 are memory and they conflict. */
6198 return ! (rtx_equal_p (x, exp_rtl)
6199 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6200 && true_dependence (exp_rtl, VOIDmode, x,
6201 rtx_addr_varies_p)));
6204 /* If we reach here, it is safe. */
6205 return 1;
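/* Usage sketch (illustrative, not from the original source): callers use
   safe_from_p as a guard before reusing a suggested destination, e.g.

       if (! safe_from_p (target, exp1, 1))
         target = 0;

   i.e. if evaluating EXP1 might clobber TARGET, drop the suggestion.
   This is exactly the pattern expand_operands uses below. */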
6208 /* Subroutine of expand_expr: return rtx if EXP is a
6209 variable or parameter; else return 0. */
6215 switch (TREE_CODE (exp))
6217 case VAR_DECL:
6218 case PARM_DECL:
6219 return DECL_RTL (exp);
6220 default:
6221 return 0;
6225 /* Return the highest power of two that EXP is known to be a multiple of.
6226 This is used in updating alignment of MEMs in array references. */
6228 static unsigned HOST_WIDE_INT
6229 highest_pow2_factor (tree exp)
6231 unsigned HOST_WIDE_INT c0, c1;
6233 switch (TREE_CODE (exp))
6236 /* We can find the lowest bit that's a one. If the low
6237 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6238 We need to handle this case since we can find it in a COND_EXPR,
6239 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6240 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6241 later ICE. */
6242 if (TREE_CONSTANT_OVERFLOW (exp))
6243 return BIGGEST_ALIGNMENT;
6246 /* Note: tree_low_cst is intentionally not used here,
6247 we don't care about the upper bits. */
6248 c0 = TREE_INT_CST_LOW (exp);
6249 c0 &= -c0;
6250 return c0 ? c0 : BIGGEST_ALIGNMENT;
6254 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6255 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6256 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6257 return MIN (c0, c1);
6260 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6261 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6262 return c0 * c1;
6264 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6266 if (integer_pow2p (TREE_OPERAND (exp, 1))
6267 && host_integerp (TREE_OPERAND (exp, 1), 1))
6269 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6270 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6271 return MAX (1, c0 / c1);
6275 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6277 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6280 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6283 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6284 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6285 return MIN (c0, c1);
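/* Worked example (illustrative): for EXP = i * 12 + 8, an unanalyzable
   VAR_DECL such as i contributes 1 via the default case, the INTEGER_CST
   12 contributes its lowest set bit (4), so the MULT_EXPR yields
   1 * 4 = 4 and the PLUS_EXPR yields MIN (4, 8) = 4: the whole sum is
   known to be a multiple of 4, and no more than that. */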
6294 /* Similar, except that the alignment requirements of TARGET are
6295 taken into account. Assume it is at least as aligned as its
6296 type, unless it is a COMPONENT_REF in which case the layout of
6297 the structure gives the alignment. */
6299 static unsigned HOST_WIDE_INT
6300 highest_pow2_factor_for_target (tree target, tree exp)
6302 unsigned HOST_WIDE_INT target_align, factor;
6304 factor = highest_pow2_factor (exp);
6305 if (TREE_CODE (target) == COMPONENT_REF)
6306 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6308 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6309 return MAX (factor, target_align);
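/* Illustrative: for a target tree of type double (TYPE_ALIGN of 64 bits,
   hence a target_align of 8 bytes) and EXP = i * 2, this returns
   MAX (2, 8) = 8, i.e. the reference may additionally rely on whatever
   alignment the target object itself is known to have. */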
6312 /* Expands variable VAR. */
6314 static void
6315 expand_var (tree var)
6317 if (DECL_EXTERNAL (var))
6318 return;
6320 if (TREE_STATIC (var))
6321 /* If this is an inlined copy of a static local variable,
6322 look up the original decl. */
6323 var = DECL_ORIGIN (var);
6325 if (TREE_STATIC (var)
6326 ? !TREE_ASM_WRITTEN (var)
6327 : !DECL_RTL_SET_P (var))
6329 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6331 /* Prepare a mem & address for the decl. */
6332 rtx x;
6334 if (TREE_STATIC (var))
6335 abort ();
6337 x = gen_rtx_MEM (DECL_MODE (var),
6338 gen_reg_rtx (Pmode));
6340 set_mem_attributes (x, var, 1);
6341 SET_DECL_RTL (var, x);
6343 else if (lang_hooks.expand_decl (var))
6344 ;
6345 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6346 expand_decl (var);
6347 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6348 rest_of_decl_compilation (var, NULL, 0, 0);
6349 else if (TREE_CODE (var) == TYPE_DECL
6350 || TREE_CODE (var) == CONST_DECL
6351 || TREE_CODE (var) == FUNCTION_DECL
6352 || TREE_CODE (var) == LABEL_DECL)
6353 /* No expansion needed. */;
6354 else
6355 abort ();
6359 /* Expands declarations of variables in list VARS. */
6361 static void
6362 expand_vars (tree vars)
6363 {
6364 for (; vars; vars = TREE_CHAIN (vars))
6365 {
6366 tree var = vars;
6368 if (DECL_EXTERNAL (var))
6369 continue;
6371 expand_var (var);
6372 expand_decl_init (var);
6373 }
6374 }
6376 /* Subroutine of expand_expr. Expand the two operands of a binary
6377 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6378 The value may be stored in TARGET if TARGET is nonzero. The
6379 MODIFIER argument is as documented by expand_expr. */
6382 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6383 enum expand_modifier modifier)
6385 if (! safe_from_p (target, exp1, 1))
6386 target = 0;
6387 if (operand_equal_p (exp0, exp1, 0))
6389 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6390 *op1 = copy_rtx (*op0);
6394 /* If we need to preserve evaluation order, copy exp0 into its own
6395 temporary variable so that it can't be clobbered by exp1. */
6396 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6397 exp0 = save_expr (exp0);
6398 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6399 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
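/* Usage sketch (illustrative): the binary-operator cases below typically do

       expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
                        subtarget, &op0, &op1, 0);

   and then hand OP0/OP1 to expand_binop or expand_mult; the safe_from_p
   check above has already cleared TARGET if evaluating the second operand
   could clobber it. */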
6404 /* expand_expr: generate code for computing expression EXP.
6405 An rtx for the computed value is returned. The value is never null.
6406 In the case of a void EXP, const0_rtx is returned.
6408 The value may be stored in TARGET if TARGET is nonzero.
6409 TARGET is just a suggestion; callers must assume that
6410 the rtx returned may not be the same as TARGET.
6412 If TARGET is CONST0_RTX, it means that the value will be ignored.
6414 If TMODE is not VOIDmode, it suggests generating the
6415 result in mode TMODE. But this is done only when convenient.
6416 Otherwise, TMODE is ignored and the value generated in its natural mode.
6417 TMODE is just a suggestion; callers must assume that
6418 the rtx returned may not have mode TMODE.
6420 Note that TARGET may have neither TMODE nor MODE. In that case, it
6421 probably will not be used.
6423 If MODIFIER is EXPAND_SUM then when EXP is an addition
6424 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6425 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6426 products as above, or REG or MEM, or constant.
6427 Ordinarily in such cases we would output mul or add instructions
6428 and then return a pseudo reg containing the sum.
6430 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6431 it also marks a label as absolutely required (it can't be dead).
6432 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6433 This is used for outputting expressions used in initializers.
6435 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6436 with a constant address even if that address is not normally legitimate.
6437 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6439 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6440 a call parameter. Such targets require special care as we haven't yet
6441 marked TARGET so that it's safe from being trashed by libcalls. We
6442 don't want to use TARGET for anything but the final result;
6443 intermediate values must go elsewhere. Additionally, calls to
6444 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6446 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6447 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6448 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6449 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6450 recursively. */
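/* Example (illustrative): expanding &arr[i] under EXPAND_SUM may return

       (plus:SI (mult:SI (reg:SI 60) (const_int 4))
                (symbol_ref:SI ("arr")))

   instead of a pseudo holding the computed address, letting the caller
   fold the whole expression into a single addressing mode. The register
   number and modes here are only for the sake of the example. */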
6452 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6453 enum expand_modifier, rtx *);
6456 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6457 enum expand_modifier modifier, rtx *alt_rtl)
6460 rtx ret, last = NULL;
6462 /* Handle ERROR_MARK before anybody tries to access its type. */
6463 if (TREE_CODE (exp) == ERROR_MARK
6464 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6466 ret = CONST0_RTX (tmode);
6467 return ret ? ret : const0_rtx;
6470 if (flag_non_call_exceptions)
6472 rn = lookup_stmt_eh_region (exp);
6473 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6475 last = get_last_insn ();
6478 /* If this is an expression of some kind and it has an associated line
6479 number, then emit the line number before expanding the expression.
6481 We need to save and restore the file and line information so that
6482 errors discovered during expansion are emitted with the right
6483 information. It would be better if the diagnostic routines
6484 used the file/line information embedded in the tree nodes rather
6485 than globals. */
6486 if (cfun && EXPR_HAS_LOCATION (exp))
6488 location_t saved_location = input_location;
6489 input_location = EXPR_LOCATION (exp);
6490 emit_line_note (input_location);
6492 /* Record where the insns produced belong. */
6493 record_block_change (TREE_BLOCK (exp));
6495 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6497 input_location = saved_location;
6501 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6504 /* If using non-call exceptions, mark all insns that may trap.
6505 expand_call() will mark CALL_INSNs before we get to this code,
6506 but it doesn't handle libcalls, and these may trap. */
6510 for (insn = next_real_insn (last); insn;
6511 insn = next_real_insn (insn))
6513 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6514 /* If we want exceptions for non-call insns, any
6515 may_trap_p instruction may throw. */
6516 && GET_CODE (PATTERN (insn)) != CLOBBER
6517 && GET_CODE (PATTERN (insn)) != USE
6518 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6520 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6521 REG_NOTES (insn));
6525 return ret;
6530 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6531 enum expand_modifier modifier, rtx *alt_rtl)
6534 tree type = TREE_TYPE (exp);
6536 enum machine_mode mode;
6537 enum tree_code code = TREE_CODE (exp);
6539 rtx subtarget, original_target;
6543 mode = TYPE_MODE (type);
6544 unsignedp = TYPE_UNSIGNED (type);
6546 /* Use subtarget as the target for operand 0 of a binary operation. */
6547 subtarget = get_subtarget (target);
6548 original_target = target;
6549 ignore = (target == const0_rtx
6550 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6551 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6552 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6553 && TREE_CODE (type) == VOID_TYPE));
6555 /* If we are going to ignore this result, we need only do something
6556 if there is a side-effect somewhere in the expression. If there
6557 is, short-circuit the most common cases here. Note that we must
6558 not call expand_expr with anything but const0_rtx in case this
6559 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6563 if (! TREE_SIDE_EFFECTS (exp))
6564 return const0_rtx;
6566 /* Ensure we reference a volatile object even if value is ignored, but
6567 don't do this if all we are doing is taking its address. */
6568 if (TREE_THIS_VOLATILE (exp)
6569 && TREE_CODE (exp) != FUNCTION_DECL
6570 && mode != VOIDmode && mode != BLKmode
6571 && modifier != EXPAND_CONST_ADDRESS)
6573 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6574 if (GET_CODE (temp) == MEM)
6575 temp = copy_to_reg (temp);
6579 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6580 || code == INDIRECT_REF || code == BUFFER_REF)
6581 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6582 modifier);
6584 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6585 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6587 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6588 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6589 return const0_rtx;
6591 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6592 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6593 /* If the second operand has no side effects, just evaluate
6594 the first. */
6595 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6596 modifier);
6597 else if (code == BIT_FIELD_REF)
6599 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6600 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6601 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6602 return const0_rtx;
6608 /* If we will do cse, generate all results into pseudo registers
6609 since 1) that allows cse to find more things
6610 and 2) otherwise cse could produce an insn the machine
6611 cannot support. An exception is a CONSTRUCTOR into a multi-word
6612 MEM: that's much more likely to be most efficient into the MEM.
6613 Another is a CALL_EXPR which must return in memory. */
6615 if (! cse_not_expected && mode != BLKmode && target
6616 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6617 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6618 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6619 target = 0;
6625 tree function = decl_function_context (exp);
6627 temp = label_rtx (exp);
6628 temp = gen_rtx_LABEL_REF (Pmode, temp);
6630 if (function != current_function_decl
6631 && function != 0)
6632 LABEL_REF_NONLOCAL_P (temp) = 1;
6634 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6635 return temp;
6639 if (!DECL_RTL_SET_P (exp))
6641 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6642 return CONST0_RTX (mode);
6645 /* ... fall through ... */
6648 /* If a static var's type was incomplete when the decl was written,
6649 but the type is complete now, lay out the decl now. */
6650 if (DECL_SIZE (exp) == 0
6651 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6652 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6653 layout_decl (exp, 0);
6655 /* ... fall through ... */
6659 if (DECL_RTL (exp) == 0)
6662 /* Ensure variable marked as used even if it doesn't go through
6663 a parser. If it hasn't been used yet, write out an external
6664 definition. */
6665 if (! TREE_USED (exp))
6667 assemble_external (exp);
6668 TREE_USED (exp) = 1;
6671 /* Show we haven't gotten RTL for this yet. */
6672 temp = 0;
6674 /* Handle variables inherited from containing functions. */
6675 context = decl_function_context (exp);
6677 if (context != 0 && context != current_function_decl
6678 /* If var is static, we don't need a static chain to access it. */
6679 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6680 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6684 /* Mark as non-local and addressable. */
6685 DECL_NONLOCAL (exp) = 1;
6686 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6687 abort ();
6688 lang_hooks.mark_addressable (exp);
6689 if (GET_CODE (DECL_RTL (exp)) != MEM)
6690 abort ();
6691 addr = XEXP (DECL_RTL (exp), 0);
6692 if (GET_CODE (addr) == MEM)
6693 addr
6694 = replace_equiv_address (addr,
6695 fix_lexical_addr (XEXP (addr, 0), exp));
6696 else
6697 addr = fix_lexical_addr (addr, exp);
6699 temp = replace_equiv_address (DECL_RTL (exp), addr);
6702 /* This is the case of an array whose size is to be determined
6703 from its initializer, while the initializer is still being parsed.
6704 See expand_decl. */
6706 else if (GET_CODE (DECL_RTL (exp)) == MEM
6707 && REG_P (XEXP (DECL_RTL (exp), 0)))
6708 temp = validize_mem (DECL_RTL (exp));
6710 /* If DECL_RTL is memory, we are in the normal case and either
6711 the address is not valid or it is not a register and -fforce-addr
6712 is specified, get the address into a register. */
6714 else if (GET_CODE (DECL_RTL (exp)) == MEM
6715 && modifier != EXPAND_CONST_ADDRESS
6716 && modifier != EXPAND_SUM
6717 && modifier != EXPAND_INITIALIZER
6718 && (! memory_address_p (DECL_MODE (exp),
6719 XEXP (DECL_RTL (exp), 0))
6720 || (flag_force_addr
6721 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6723 if (alt_rtl)
6724 *alt_rtl = DECL_RTL (exp);
6725 temp = replace_equiv_address (DECL_RTL (exp),
6726 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6729 /* If we got something, return it. But first, set the alignment
6730 if the address is a register. */
6731 if (temp != 0)
6732 {
6733 if (GET_CODE (temp) == MEM && REG_P (XEXP (temp, 0)))
6734 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6736 return temp;
6737 }
6739 /* If the mode of DECL_RTL does not match that of the decl, it
6740 must be a promoted value. We return a SUBREG of the wanted mode,
6741 but mark it so that we know that it was already extended. */
6743 if (REG_P (DECL_RTL (exp))
6744 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6746 /* Get the signedness used for this variable. Ensure we get the
6747 same mode we got when the variable was declared. */
6748 if (GET_MODE (DECL_RTL (exp))
6749 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6750 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6751 abort ();
6753 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6754 SUBREG_PROMOTED_VAR_P (temp) = 1;
6755 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6756 return temp;
6759 return DECL_RTL (exp);
6762 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6763 TREE_INT_CST_HIGH (exp), mode);
6765 /* ??? If overflow is set, fold will have done an incomplete job,
6766 which can result in (plus xx (const_int 0)), which can get
6767 simplified by validate_replace_rtx during virtual register
6768 instantiation, which can result in unrecognizable insns.
6769 Avoid this by forcing all overflows into registers. */
6770 if (TREE_CONSTANT_OVERFLOW (exp)
6771 && modifier != EXPAND_INITIALIZER)
6772 temp = force_reg (mode, temp);
6773 return temp;
6777 return const_vector_from_tree (exp);
6780 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6783 /* If optimized, generate immediate CONST_DOUBLE
6784 which will be turned into memory by reload if necessary.
6786 We used to force a register so that loop.c could see it. But
6787 this does not allow gen_* patterns to perform optimizations with
6788 the constants. It also produces two insns in cases like "x = 1.0;".
6789 On most machines, floating-point constants are not permitted in
6790 many insns, so we'd end up copying it to a register in any case.
6792 Now, we do the copying in expand_binop, if appropriate. */
6793 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6794 TYPE_MODE (TREE_TYPE (exp)));
6797 /* Handle evaluating a complex constant in a CONCAT target. */
6798 if (original_target && GET_CODE (original_target) == CONCAT)
6800 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6803 rtarg = XEXP (original_target, 0);
6804 itarg = XEXP (original_target, 1);
6806 /* Move the real and imaginary parts separately. */
6807 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6808 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6810 if (op0 != rtarg)
6811 emit_move_insn (rtarg, op0);
6812 if (op1 != itarg)
6813 emit_move_insn (itarg, op1);
6815 return original_target;
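/* Illustrative: a _Complex double constant expanded into a CONCAT target
   of two DFmode registers is filled piecewise by the two moves above;
   no single rtx for the whole complex value is ever materialized. */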
6818 /* ... fall through ... */
6821 temp = output_constant_def (exp, 1);
6823 /* temp contains a constant address.
6824 On RISC machines where a constant address isn't valid,
6825 make some insns to get that address into a register. */
6826 if (modifier != EXPAND_CONST_ADDRESS
6827 && modifier != EXPAND_INITIALIZER
6828 && modifier != EXPAND_SUM
6829 && (! memory_address_p (mode, XEXP (temp, 0))
6830 || flag_force_addr))
6831 return replace_equiv_address (temp,
6832 copy_rtx (XEXP (temp, 0)));
6833 return temp;
6836 context = decl_function_context (exp);
6838 /* If this SAVE_EXPR was at global context, assume we are an
6839 initialization function and move it into our context. */
6840 if (context == 0)
6841 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6843 if (context == current_function_decl)
6844 context = 0;
6846 /* If this is non-local, handle it. */
6847 if (context)
6849 /* The following call just exists to abort if the context is
6850 not of a containing function. */
6851 find_function_data (context);
6853 temp = SAVE_EXPR_RTL (exp);
6854 if (temp && REG_P (temp))
6856 put_var_into_stack (exp, /*rescan=*/true);
6857 temp = SAVE_EXPR_RTL (exp);
6859 if (temp == 0 || GET_CODE (temp) != MEM)
6860 abort ();
6861 return
6862 replace_equiv_address (temp,
6863 fix_lexical_addr (XEXP (temp, 0), exp));
6865 if (SAVE_EXPR_RTL (exp) == 0)
6867 if (mode == VOIDmode)
6868 temp = const0_rtx;
6869 else
6870 temp = assign_temp (build_qualified_type (type,
6871 (TYPE_QUALS (type)
6872 | TYPE_QUAL_CONST)),
6873 3, 0, 0);
6875 SAVE_EXPR_RTL (exp) = temp;
6876 if (!optimize && REG_P (temp))
6877 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6878 save_expr_regs);
6880 /* If the mode of TEMP does not match that of the expression, it
6881 must be a promoted value. We pass store_expr a SUBREG of the
6882 wanted mode but mark it so that we know that it was already
6883 extended. */
6885 if (REG_P (temp) && GET_MODE (temp) != mode)
6887 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6888 promote_mode (type, mode, &unsignedp, 0);
6889 SUBREG_PROMOTED_VAR_P (temp) = 1;
6890 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6893 if (temp == const0_rtx)
6894 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6895 else
6896 store_expr (TREE_OPERAND (exp, 0), temp,
6897 modifier == EXPAND_STACK_PARM ? 2 : 0);
6899 TREE_USED (exp) = 1;
6902 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6903 must be a promoted value. We return a SUBREG of the wanted mode,
6904 but mark it so that we know that it was already extended. */
6906 if (REG_P (SAVE_EXPR_RTL (exp))
6907 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6909 /* Compute the signedness and make the proper SUBREG. */
6910 promote_mode (type, mode, &unsignedp, 0);
6911 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6912 SUBREG_PROMOTED_VAR_P (temp) = 1;
6913 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6917 return SAVE_EXPR_RTL (exp);
6922 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6923 TREE_OPERAND (exp, 0)
6924 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6925 return temp;
6929 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6930 expand_goto (TREE_OPERAND (exp, 0));
6931 else
6932 expand_computed_goto (TREE_OPERAND (exp, 0));
6933 return const0_rtx;
6935 /* These are lowered during gimplification, so we should never ever
6936 see them here. */
6937 abort ();
6941 case LABELED_BLOCK_EXPR:
6942 if (LABELED_BLOCK_BODY (exp))
6943 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6944 /* Should perhaps use expand_label, but this is simpler and safer. */
6945 do_pending_stack_adjust ();
6946 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6947 return const0_rtx;
6949 case EXIT_BLOCK_EXPR:
6950 if (EXIT_BLOCK_RETURN (exp))
6951 sorry ("returned value in block_exit_expr");
6952 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6953 return const0_rtx;
6957 tree block = BIND_EXPR_BLOCK (exp);
6958 int mark_ends;
6960 if (TREE_CODE (BIND_EXPR_BODY (exp)) != RTL_EXPR)
6962 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6963 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6964 mark_ends = (block != NULL_TREE);
6965 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6969 /* If we're not in functions-as-trees mode, we've already emitted
6970 those notes into our RTL_EXPR, so we just want to splice our BLOCK
6971 into the enclosing one. */
6972 mark_ends = 0;
6974 /* Need to open a binding contour here because
6975 if there are any cleanups they must be contained here. */
6976 expand_start_bindings_and_block (2, NULL_TREE);
6978 /* Mark the corresponding BLOCK for output in its proper place. */
6980 if (block)
6981 {
6982 if (TREE_USED (block))
6983 abort ();
6984 lang_hooks.decls.insert_block (block);
6985 }
6987 /* If VARS have not yet been expanded, expand them now. */
6988 expand_vars (BIND_EXPR_VARS (exp));
6990 /* TARGET was clobbered early in this function. The correct
6991 indicator of whether or not we need the value of this
6992 expression is the IGNORE variable. */
6993 temp = expand_expr (BIND_EXPR_BODY (exp),
6994 ignore ? const0_rtx : target,
6995 VOIDmode, modifier);
6997 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6999 return temp;
7003 if (RTL_EXPR_SEQUENCE (exp))
7005 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7006 abort ();
7007 emit_insn (RTL_EXPR_SEQUENCE (exp));
7008 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7010 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7011 free_temps_for_rtl_expr (exp);
7012 if (alt_rtl)
7013 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
7014 return RTL_EXPR_RTL (exp);
7016 case CONSTRUCTOR:
7017 /* If we don't need the result, just ensure we evaluate any
7018 subexpressions. */
7019 if (ignore)
7020 {
7021 tree elt;
7023 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7024 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7026 return const0_rtx;
7027 }
7029 /* All elts simple constants => refer to a constant in memory. But
7030 if this is a non-BLKmode mode, let it store a field at a time
7031 since that should make a CONST_INT or CONST_DOUBLE when we
7032 fold. Likewise, if we have a target we can use, it is best to
7033 store directly into the target unless the type is large enough
7034 that memcpy will be used. If we are making an initializer and
7035 all operands are constant, put it in memory as well.
7037 FIXME: Avoid trying to fill vector constructors piece-meal.
7038 Output them with output_constant_def below unless we're sure
7039 they're zeros. This should go away when vector initializers
7040 are treated like VECTOR_CST instead of arrays. */
7042 else if ((TREE_STATIC (exp)
7043 && ((mode == BLKmode
7044 && ! (target != 0 && safe_from_p (target, exp, 1)))
7045 || TREE_ADDRESSABLE (exp)
7046 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7047 && (! MOVE_BY_PIECES_P
7048 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7049 TYPE_ALIGN (type)))
7050 && ! mostly_zeros_p (exp))))
7051 || ((modifier == EXPAND_INITIALIZER
7052 || modifier == EXPAND_CONST_ADDRESS)
7053 && TREE_CONSTANT (exp)))
7055 rtx constructor = output_constant_def (exp, 1);
7057 if (modifier != EXPAND_CONST_ADDRESS
7058 && modifier != EXPAND_INITIALIZER
7059 && modifier != EXPAND_SUM)
7060 constructor = validize_mem (constructor);
7061 return constructor;
7066 /* Handle calls that pass values in multiple non-contiguous
7067 locations. The Irix 6 ABI has examples of this. */
7068 if (target == 0 || ! safe_from_p (target, exp, 1)
7069 || GET_CODE (target) == PARALLEL
7070 || modifier == EXPAND_STACK_PARM)
7071 target
7072 = assign_temp (build_qualified_type (type,
7073 (TYPE_QUALS (type)
7074 | (TREE_READONLY (exp)
7075 * TYPE_QUAL_CONST))),
7076 0, TREE_ADDRESSABLE (exp), 1);
7078 store_constructor (exp, target, 0, int_expr_size (exp));
7079 return target;
7082 case INDIRECT_REF:
7083 {
7084 tree exp1 = TREE_OPERAND (exp, 0);
7086 if (modifier != EXPAND_WRITE)
7087 {
7088 tree t;
7090 t = fold_read_from_constant_string (exp);
7091 if (t)
7092 return expand_expr (t, target, tmode, modifier);
7093 }
7095 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7096 op0 = memory_address (mode, op0);
7097 temp = gen_rtx_MEM (mode, op0);
7098 set_mem_attributes (temp, exp, 0);
7100 /* If we are writing to this object and its type is a record with
7101 readonly fields, we must mark it as readonly so it will
7102 conflict with readonly references to those fields. */
7103 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7104 RTX_UNCHANGING_P (temp) = 1;
7106 return temp;
7107 }
7110 case ARRAY_REF:
7111 #ifdef ENABLE_CHECKING
7112 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7113 abort ();
7114 #endif
7117 tree array = TREE_OPERAND (exp, 0);
7118 tree low_bound = array_ref_low_bound (exp);
7119 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7122 /* Optimize the special-case of a zero lower bound.
7124 We convert the low_bound to sizetype to avoid some problems
7125 with constant folding. (E.g. suppose the lower bound is 1,
7126 and its mode is QI. Without the conversion, (ARRAY
7127 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7128 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7130 if (! integer_zerop (low_bound))
7131 index = size_diffop (index, convert (sizetype, low_bound));
7133 /* Fold an expression like: "foo"[2].
7134 This is not done in fold so it won't happen inside &.
7135 Don't fold if this is for wide characters since it's too
7136 difficult to do correctly and this is a very rare case. */
7138 if (modifier != EXPAND_CONST_ADDRESS
7139 && modifier != EXPAND_INITIALIZER
7140 && modifier != EXPAND_MEMORY)
7142 tree t = fold_read_from_constant_string (exp);
7144 if (t)
7145 return expand_expr (t, target, tmode, modifier);
7148 /* If this is a constant index into a constant array,
7149 just get the value from the array. Handle both the cases when
7150 we have an explicit constructor and when our operand is a variable
7151 that was declared const. */
7153 if (modifier != EXPAND_CONST_ADDRESS
7154 && modifier != EXPAND_INITIALIZER
7155 && modifier != EXPAND_MEMORY
7156 && TREE_CODE (array) == CONSTRUCTOR
7157 && ! TREE_SIDE_EFFECTS (array)
7158 && TREE_CODE (index) == INTEGER_CST
7159 && 0 > compare_tree_int (index,
7160 list_length (CONSTRUCTOR_ELTS
7161 (TREE_OPERAND (exp, 0)))))
7165 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7166 i = TREE_INT_CST_LOW (index);
7167 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7168 ;
7170 if (elem)
7171 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7172 modifier);
7175 else if (optimize >= 1
7176 && modifier != EXPAND_CONST_ADDRESS
7177 && modifier != EXPAND_INITIALIZER
7178 && modifier != EXPAND_MEMORY
7179 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7180 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7181 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7182 && targetm.binds_local_p (array))
7184 if (TREE_CODE (index) == INTEGER_CST)
7186 tree init = DECL_INITIAL (array);
7188 if (TREE_CODE (init) == CONSTRUCTOR)
7192 for (elem = CONSTRUCTOR_ELTS (init);
7193 (elem
7194 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7195 elem = TREE_CHAIN (elem))
7196 ;
7198 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7199 return expand_expr (fold (TREE_VALUE (elem)), target,
7200 tmode, modifier);
7202 else if (TREE_CODE (init) == STRING_CST
7203 && 0 > compare_tree_int (index,
7204 TREE_STRING_LENGTH (init)))
7206 tree type = TREE_TYPE (TREE_TYPE (init));
7207 enum machine_mode mode = TYPE_MODE (type);
7209 if (GET_MODE_CLASS (mode) == MODE_INT
7210 && GET_MODE_SIZE (mode) == 1)
7211 return gen_int_mode (TREE_STRING_POINTER (init)
7212 [TREE_INT_CST_LOW (index)], mode);
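/* E.g. (illustrative) the constant read "foo"[2] reaches the STRING_CST
   arm above and folds at compile time to (const_int 111), the code for
   'o', without emitting any memory reference. */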
7217 goto normal_inner_ref;
7220 /* If the operand is a CONSTRUCTOR, we can just extract the
7221 appropriate field if it is present. */
7222 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7226 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7227 elt = TREE_CHAIN (elt))
7228 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7229 /* We can normally use the value of the field in the
7230 CONSTRUCTOR. However, if this is a bitfield in
7231 an integral mode that we can fit in a HOST_WIDE_INT,
7232 we must mask only the number of bits in the bitfield,
7233 since this is done implicitly by the constructor. If
7234 the bitfield does not meet either of those conditions,
7235 we can't do this optimization. */
7236 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7237 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7239 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7240 <= HOST_BITS_PER_WIDE_INT))))
7242 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7243 && modifier == EXPAND_STACK_PARM)
7244 target = 0;
7245 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7246 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7248 HOST_WIDE_INT bitsize
7249 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7250 enum machine_mode imode
7251 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7253 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7254 {
7255 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7256 op0 = expand_and (imode, op0, op1, target);
7257 }
7258 else
7259 {
7260 tree count
7261 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7262 0);
7264 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7265 target, 0);
7266 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7267 target, 0);
7268 }
7270 return op0;
7274 goto normal_inner_ref;
7277 case ARRAY_RANGE_REF:
7278 normal_inner_ref:
7280 enum machine_mode mode1;
7281 HOST_WIDE_INT bitsize, bitpos;
7282 tree offset;
7283 int volatilep = 0;
7284 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7285 &mode1, &unsignedp, &volatilep);
7286 rtx orig_op0;
7288 /* If we got back the original object, something is wrong. Perhaps
7289 we are evaluating an expression too early. In any event, don't
7290 infinitely recurse. */
7291 if (tem == exp)
7292 abort ();
7294 /* If TEM's type is a union of variable size, pass TARGET to the inner
7295 computation, since it will need a temporary and TARGET is known
7296 to have to do. This occurs in unchecked conversion in Ada. */
7298 orig_op0 = op0
7299 = expand_expr (tem,
7300 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7301 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7302 != INTEGER_CST)
7303 && modifier != EXPAND_STACK_PARM
7304 ? target : NULL_RTX),
7305 VOIDmode,
7306 (modifier == EXPAND_INITIALIZER
7307 || modifier == EXPAND_CONST_ADDRESS
7308 || modifier == EXPAND_STACK_PARM)
7309 ? modifier : EXPAND_NORMAL);
7311 /* If this is a constant, put it into a register if it is a
7312 legitimate constant and OFFSET is 0 and memory if it isn't. */
7313 if (CONSTANT_P (op0))
7315 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7316 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7317 && offset == 0)
7318 op0 = force_reg (mode, op0);
7319 else
7320 op0 = validize_mem (force_const_mem (mode, op0));
7323 /* Otherwise, if this object is not in memory and we either have an
7324 offset or a BLKmode result, put it there. This case can't occur in
7325 C, but can in Ada if we have unchecked conversion of an expression
7326 from a scalar type to an array or record type or for an
7327 ARRAY_RANGE_REF whose type is BLKmode. */
7328 else if (GET_CODE (op0) != MEM
7329 && (offset != 0
7330 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7332 /* If the operand is a SAVE_EXPR, we can deal with this by
7333 forcing the SAVE_EXPR into memory. */
7334 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7336 put_var_into_stack (TREE_OPERAND (exp, 0),
7337 /*rescan=*/true);
7338 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7339 else
7340 {
7341 tree nt
7343 = build_qualified_type (TREE_TYPE (tem),
7344 (TYPE_QUALS (TREE_TYPE (tem))
7345 | TYPE_QUAL_CONST));
7346 rtx memloc = assign_temp (nt, 1, 1, 1);
7348 emit_move_insn (memloc, op0);
7349 op0 = memloc;
7350 }
7353 if (offset != 0)
7354 {
7355 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7356 EXPAND_SUM);
7358 if (GET_CODE (op0) != MEM)
7359 abort ();
7361 #ifdef POINTERS_EXTEND_UNSIGNED
7362 if (GET_MODE (offset_rtx) != Pmode)
7363 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7364 #else
7365 if (GET_MODE (offset_rtx) != ptr_mode)
7366 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7367 #endif
7369 if (GET_MODE (op0) == BLKmode
7370 /* A constant address in OP0 can have VOIDmode, we must
7371 not try to call force_reg in that case. */
7372 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7373 && bitsize != 0
7374 && (bitpos % bitsize) == 0
7375 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7376 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7377 {
7378 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7379 bitpos = 0;
7380 }
7381 else
7382 op0 = offset_address (op0, offset_rtx,
7383 highest_pow2_factor (offset));
7384 }
7386 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7387 record its alignment as BIGGEST_ALIGNMENT. */
7388 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7389 && is_aligning_offset (offset, tem))
7390 set_mem_align (op0, BIGGEST_ALIGNMENT);
7392 /* Don't forget about volatility even if this is a bitfield. */
7393 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7395 if (op0 == orig_op0)
7396 op0 = copy_rtx (op0);
7398 MEM_VOLATILE_P (op0) = 1;
7401 /* The following code doesn't handle CONCAT.
7402 Assume only bitpos == 0 can be used for CONCAT, due to
7403 one-element arrays having the same mode as their element. */
7404 if (GET_CODE (op0) == CONCAT)
7405 {
7406 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7407 abort ();
7408 return op0;
7409 }
7411 /* In cases where an aligned union has an unaligned object
7412 as a field, we might be extracting a BLKmode value from
7413 an integer-mode (e.g., SImode) object. Handle this case
7414 by doing the extract into an object as wide as the field
7415 (which we know to be the width of a basic mode), then
7416 storing into memory, and changing the mode to BLKmode. */
7417 if (mode1 == VOIDmode
7418 || REG_P (op0) || GET_CODE (op0) == SUBREG
7419 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7420 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7421 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7422 && modifier != EXPAND_CONST_ADDRESS
7423 && modifier != EXPAND_INITIALIZER)
7424 /* If the field isn't aligned enough to fetch as a memref,
7425 fetch it as a bit field. */
7426 || (mode1 != BLKmode
7427 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7428 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7429 || (GET_CODE (op0) == MEM
7430 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7431 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7432 && ((modifier == EXPAND_CONST_ADDRESS
7433 || modifier == EXPAND_INITIALIZER)
7434 ? STRICT_ALIGNMENT
7435 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7436 || (bitpos % BITS_PER_UNIT != 0)))
7437 /* If the type and the field are a constant size and the
7438 size of the type isn't the same size as the bitfield,
7439 we must use bitfield operations. */
7440 || (bitsize >= 0
7441 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7442 == INTEGER_CST)
7443 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7444 bitsize)))
7446 enum machine_mode ext_mode = mode;
7448 if (ext_mode == BLKmode
7449 && ! (target != 0 && GET_CODE (op0) == MEM
7450 && GET_CODE (target) == MEM
7451 && bitpos % BITS_PER_UNIT == 0))
7452 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7454 if (ext_mode == BLKmode)
7455 {
7456 if (target == 0)
7457 target = assign_temp (type, 0, 1, 1);
7462 /* In this case, BITPOS must start at a byte boundary and
7463 TARGET, if specified, must be a MEM. */
7464 if (GET_CODE (op0) != MEM
7465 || (target != 0 && GET_CODE (target) != MEM)
7466 || bitpos % BITS_PER_UNIT != 0)
7467 abort ();
7469 emit_block_move (target,
7470 adjust_address (op0, VOIDmode,
7471 bitpos / BITS_PER_UNIT),
7472 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7473 / BITS_PER_UNIT),
7474 (modifier == EXPAND_STACK_PARM
7475 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7477 return target;
7478 }
7480 op0 = validize_mem (op0);
7482 if (GET_CODE (op0) == MEM && REG_P (XEXP (op0, 0)))
7483 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7485 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7486 (modifier == EXPAND_STACK_PARM
7487 ? NULL_RTX : target),
7488 ext_mode, ext_mode,
7489 int_size_in_bytes (TREE_TYPE (tem)));
7491 /* If the result is a record type and BITSIZE is narrower than
7492 the mode of OP0, an integral mode, and this is a big endian
7493 machine, we must put the field into the high-order bits. */
7494 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7495 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7496 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7497 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7498 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7499 - bitsize),
7500 op0, 1);
7502 /* If the result type is BLKmode, store the data into a temporary
7503 of the appropriate type, but with the mode corresponding to the
7504 mode for the data we have (op0's mode). It's tempting to make
7505 this a constant type, since we know it's only being stored once,
7506 but that can cause problems if we are taking the address of this
7507 COMPONENT_REF because the MEM of any reference via that address
7508 will have flags corresponding to the type, which will not
7509 necessarily be constant. */
7510 if (mode == BLKmode)
7511 {
7512 rtx new
7513 = assign_stack_temp_for_type
7514 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7516 emit_move_insn (new, op0);
7517 op0 = copy_rtx (new);
7518 PUT_MODE (op0, BLKmode);
7519 set_mem_attributes (op0, exp, 1);
7520 }
7522 return op0;
7525 /* If the result is BLKmode, use that to access the object
7526 now as well. */
7527 if (mode == BLKmode)
7528 mode1 = BLKmode;
7530 /* Get a reference to just this component. */
7531 if (modifier == EXPAND_CONST_ADDRESS
7532 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7533 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7535 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7537 if (op0 == orig_op0)
7538 op0 = copy_rtx (op0);
7540 set_mem_attributes (op0, exp, 0);
7541 if (REG_P (XEXP (op0, 0)))
7542 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7544 MEM_VOLATILE_P (op0) |= volatilep;
7545 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7546 || modifier == EXPAND_CONST_ADDRESS
7547 || modifier == EXPAND_INITIALIZER)
7548 return op0;
7549 else if (target == 0)
7550 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7552 convert_move (target, op0, unsignedp);
7553 return target;
7557 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7559 /* Intended for a reference to a buffer of a file-object in Pascal.
7560 But it's not certain that a special tree code will really be
7561 necessary for these. INDIRECT_REF might work for them. */
7567 /* Pascal set IN expression.
7570 rlo = set_low - (set_low%bits_per_word);
7571 the_word = set [ (index - rlo)/bits_per_word ];
7572 bit_index = index % bits_per_word;
7573 bitmask = 1 << bit_index;
7574 return !!(the_word & bitmask); */
7576 tree set = TREE_OPERAND (exp, 0);
7577 tree index = TREE_OPERAND (exp, 1);
7578 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7579 tree set_type = TREE_TYPE (set);
7580 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7581 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7582 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7583 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7584 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7585 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7586 rtx setaddr = XEXP (setval, 0);
7587 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7588 rtx rlow;
7589 rtx diff, quo, rem, addr, bit, result;
7591 /* If domain is empty, answer is no. Likewise if index is constant
7592 and out of bounds. */
7593 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7594 && TREE_CODE (set_low_bound) == INTEGER_CST
7595 && tree_int_cst_lt (set_high_bound, set_low_bound))
7596 || (TREE_CODE (index) == INTEGER_CST
7597 && TREE_CODE (set_low_bound) == INTEGER_CST
7598 && tree_int_cst_lt (index, set_low_bound))
7599 || (TREE_CODE (set_high_bound) == INTEGER_CST
7600 && TREE_CODE (index) == INTEGER_CST
7601 && tree_int_cst_lt (set_high_bound, index))))
7602 return const0_rtx;
7604 if (target == 0)
7605 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7607 /* If we get here, we have to generate the code for both cases
7608 (in range and out of range). */
7610 op0 = gen_label_rtx ();
7611 op1 = gen_label_rtx ();
7613 if (! (GET_CODE (index_val) == CONST_INT
7614 && GET_CODE (lo_r) == CONST_INT))
7615 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7616 GET_MODE (index_val), iunsignedp, op1);
7618 if (! (GET_CODE (index_val) == CONST_INT
7619 && GET_CODE (hi_r) == CONST_INT))
7620 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7621 GET_MODE (index_val), iunsignedp, op1);
7623 /* Calculate the element number of bit zero in the first word
7624 of the set. */
7625 if (GET_CODE (lo_r) == CONST_INT)
7626 rlow = GEN_INT (INTVAL (lo_r)
7627 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7628 else
7629 rlow = expand_binop (index_mode, and_optab, lo_r,
7630 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7631 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7633 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7634 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7636 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7637 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7638 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7639 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7641 addr = memory_address (byte_mode,
7642 expand_binop (index_mode, add_optab, diff,
7643 setaddr, NULL_RTX, iunsignedp,
7646 /* Extract the bit we want to examine. */
7647 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7648 gen_rtx_MEM (byte_mode, addr),
7649 make_tree (TREE_TYPE (index), rem),
7650 NULL_RTX, 1);
7651 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7652 GET_MODE (target) == byte_mode ? target : 0,
7653 1, OPTAB_LIB_WIDEN);
7655 if (result != target)
7656 convert_move (target, result, 1);
7658 /* Output the code to handle the out-of-range case. */
7659 emit_jump (op0);
7660 emit_label (op1);
7661 emit_move_insn (target, const0_rtx);
7662 emit_label (op0);
7663 return target;
7666 case WITH_CLEANUP_EXPR:
7667 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7669 WITH_CLEANUP_EXPR_RTL (exp)
7670 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7671 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7672 CLEANUP_EH_ONLY (exp));
7674 /* That's it for this cleanup. */
7675 TREE_OPERAND (exp, 1) = 0;
7677 return WITH_CLEANUP_EXPR_RTL (exp);
7679 case CLEANUP_POINT_EXPR:
7681 /* Start a new binding layer that will keep track of all cleanup
7682 actions to be performed. */
7683 expand_start_bindings (2);
7685 target_temp_slot_level = temp_slot_level;
7687 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7688 /* If we're going to use this value, load it up now. */
7689 if (! ignore)
7690 op0 = force_not_mem (op0);
7691 preserve_temp_slots (op0);
7692 expand_end_bindings (NULL_TREE, 0, 0);
7694 return op0;
7696 case CALL_EXPR:
7697 /* Check for a built-in function. */
7698 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7699 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7700 == FUNCTION_DECL)
7701 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7703 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7704 == BUILT_IN_FRONTEND)
7705 return lang_hooks.expand_expr (exp, original_target,
7706 tmode, modifier,
7707 alt_rtl);
7708 else
7709 return expand_builtin (exp, target, subtarget, tmode, ignore);
7712 return expand_call (exp, target, ignore);
7714 case NON_LVALUE_EXPR:
7717 case REFERENCE_EXPR:
7718 if (TREE_OPERAND (exp, 0) == error_mark_node)
7719 return const0_rtx;
7721 if (TREE_CODE (type) == UNION_TYPE)
7723 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7725 /* If both input and output are BLKmode, this conversion isn't doing
7726 anything except possibly changing memory attribute. */
7727 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7729 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7730 modifier);
7732 result = copy_rtx (result);
7733 set_mem_attributes (result, exp, 0);
7734 return result;
7737 if (target == 0)
7739 if (TYPE_MODE (type) != BLKmode)
7740 target = gen_reg_rtx (TYPE_MODE (type));
7741 else
7742 target = assign_temp (type, 0, 1, 1);
7745 if (GET_CODE (target) == MEM)
7746 /* Store data into beginning of memory target. */
7747 store_expr (TREE_OPERAND (exp, 0),
7748 adjust_address (target, TYPE_MODE (valtype), 0),
7749 modifier == EXPAND_STACK_PARM ? 2 : 0);
7751 else if (REG_P (target))
7752 /* Store this field into a union of the proper type. */
7753 store_field (target,
7754 MIN ((int_size_in_bytes (TREE_TYPE
7755 (TREE_OPERAND (exp, 0)))
7757 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7758 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7759 VOIDmode, 0, type, 0);
7763 /* Return the entire union. */
7764 return target;
7767 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7769 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7770 modifier);
7772 /* If the signedness of the conversion differs and OP0 is
7773 a promoted SUBREG, clear that indication since we now
7774 have to do the proper extension. */
7775 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7776 && GET_CODE (op0) == SUBREG)
7777 SUBREG_PROMOTED_VAR_P (op0) = 0;
7779 return op0;
7782 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7783 if (GET_MODE (op0) == mode)
7784 return op0;
7786 /* If OP0 is a constant, just convert it into the proper mode. */
7787 if (CONSTANT_P (op0))
7789 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7790 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7792 if (modifier == EXPAND_INITIALIZER)
7793 return simplify_gen_subreg (mode, op0, inner_mode,
7794 subreg_lowpart_offset (mode,
7795 inner_mode));
7796 else
7797 return convert_modes (mode, inner_mode, op0,
7798 TYPE_UNSIGNED (inner_type));
7801 if (modifier == EXPAND_INITIALIZER)
7802 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7804 if (target == 0)
7805 return
7806 convert_to_mode (mode, op0,
7807 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7808 else
7809 convert_move (target, op0,
7810 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7811 return target;
7813 case VIEW_CONVERT_EXPR:
7814 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7816 /* If the input and output modes are both the same, we are done.
7817 Otherwise, if neither mode is BLKmode and both are integral and within
7818 a word, we can use gen_lowpart. If neither is true, make sure the
7819 operand is in memory and convert the MEM to the new mode. */
7820 if (TYPE_MODE (type) == GET_MODE (op0))
7822 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7823 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7824 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7825 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7826 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7827 op0 = gen_lowpart (TYPE_MODE (type), op0);
7828 else if (GET_CODE (op0) != MEM)
7830 /* If the operand is not a MEM, force it into memory. Since we
7831 are going to be changing the mode of the MEM, don't call
7832 force_const_mem for constants because we don't allow pool
7833 constants to change mode. */
7834 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7836 if (TREE_ADDRESSABLE (exp))
7837 abort ();
7839 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7840 target
7841 = assign_stack_temp_for_type
7842 (TYPE_MODE (inner_type),
7843 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7845 emit_move_insn (target, op0);
7846 op0 = target;
7849 /* At this point, OP0 is in the correct mode. If the output type is such
7850 that the operand is known to be aligned, indicate that it is.
7851 Otherwise, we need only be concerned about alignment for non-BLKmode
7852 results. */
7853 if (GET_CODE (op0) == MEM)
7855 op0 = copy_rtx (op0);
7857 if (TYPE_ALIGN_OK (type))
7858 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7859 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7860 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7862 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7863 HOST_WIDE_INT temp_size
7864 = MAX (int_size_in_bytes (inner_type),
7865 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7866 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7867 temp_size, 0, type);
7868 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7870 if (TREE_ADDRESSABLE (exp))
7871 abort ();
7873 if (GET_MODE (op0) == BLKmode)
7874 emit_block_move (new_with_op0_mode, op0,
7875 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7876 (modifier == EXPAND_STACK_PARM
7877 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7878 else
7879 emit_move_insn (new_with_op0_mode, op0);
7881 op0 = new;
7883 else
7884 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7886 return op0;
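/* Illustrative: a VIEW_CONVERT_EXPR from float to int cannot take the
   gen_lowpart path above (SFmode is not MODE_INT), so the operand is
   spilled to a stack temporary and re-read in the new mode, i.e. the
   bits are reinterpreted rather than the value converted. */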
7889 case PLUS_EXPR:
7890 this_optab = ! unsignedp && flag_trapv
7891 && (GET_MODE_CLASS (mode) == MODE_INT)
7892 ? addv_optab : add_optab;
7894 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7895 something else, make sure we add the register to the constant and
7896 then to the other thing. This case can occur during strength
7897 reduction and doing it this way will produce better code if the
7898 frame pointer or argument pointer is eliminated.
7900 fold-const.c will ensure that the constant is always in the inner
7901 PLUS_EXPR, so the only case we need to do anything about is if
7902 sp, ap, or fp is our second argument, in which case we must swap
7903 the innermost first argument and our second argument. */
7905 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7906 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7907 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7908 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7909 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7910 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7912 tree t = TREE_OPERAND (exp, 1);
7914 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7915 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7918 /* If the result is to be ptr_mode and we are adding an integer to
7919 something, we might be forming a constant. So try to use
7920 plus_constant. If it produces a sum and we can't accept it,
7921 use force_operand. This allows P = &ARR[const] to generate
7922 efficient code on machines where a SYMBOL_REF is not a valid
7923 address.
7925 If this is an EXPAND_SUM call, always return the sum. */
7926 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7927 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7929 if (modifier == EXPAND_STACK_PARM)
7930 target = 0;
7931 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7932 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7933 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7934 {
7935 rtx constant_part;
7937 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7938 EXPAND_SUM);
7939 /* Use immed_double_const to ensure that the constant is
7940 truncated according to the mode of OP1, then sign extended
7941 to a HOST_WIDE_INT. Using the constant directly can result
7942 in non-canonical RTL in a 64x32 cross compile. */
7943 constant_part
7944 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7945 (HOST_WIDE_INT) 0,
7946 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7947 op1 = plus_constant (op1, INTVAL (constant_part));
7948 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7949 op1 = force_operand (op1, target);
7951 return op1;
7952 }
7953 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7954 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7955 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7956 {
7957 rtx constant_part;
7959 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7960 (modifier == EXPAND_INITIALIZER
7961 ? EXPAND_INITIALIZER : EXPAND_SUM));
7962 if (! CONSTANT_P (op0))
7964 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7965 VOIDmode, modifier);
7966 /* Return a PLUS if modifier says it's OK. */
7967 if (modifier == EXPAND_SUM
7968 || modifier == EXPAND_INITIALIZER)
7969 return simplify_gen_binary (PLUS, mode, op0, op1);
7970 goto binop2;
7972 /* Use immed_double_const to ensure that the constant is
7973 truncated according to the mode of OP1, then sign extended
7974 to a HOST_WIDE_INT. Using the constant directly can result
7975 in non-canonical RTL in a 64x32 cross compile. */
7976 constant_part
7977 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7978 (HOST_WIDE_INT) 0,
7979 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7980 op0 = plus_constant (op0, INTVAL (constant_part));
7981 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7982 op0 = force_operand (op0, target);
7984 return op0;
7985 }
7987 /* No sense saving up arithmetic to be done
7988 if it's all in the wrong mode to form part of an address.
7989 And force_operand won't know whether to sign-extend or
7990 zero-extend. */
7991 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7992 || mode != ptr_mode)
7994 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7995 subtarget, &op0, &op1, 0);
7996 if (op0 == const0_rtx)
7997 return op1;
7998 if (op1 == const0_rtx)
7999 return op0;
8000 goto binop2;
8003 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8004 subtarget, &op0, &op1, modifier);
8005 return simplify_gen_binary (PLUS, mode, op0, op1);
8008 /* For initializers, we are allowed to return a MINUS of two
8009 symbolic constants. Here we handle all cases when both operands
8010 are constant. */
8011 /* Handle difference of two symbolic constants,
8012 for the sake of an initializer. */
8013 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8014 && really_constant_p (TREE_OPERAND (exp, 0))
8015 && really_constant_p (TREE_OPERAND (exp, 1)))
8017 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8018 NULL_RTX, &op0, &op1, modifier);
8020 /* If the last operand is a CONST_INT, use plus_constant of
8021 the negated constant. Else make the MINUS. */
8022 if (GET_CODE (op1) == CONST_INT)
8023 return plus_constant (op0, - INTVAL (op1));
8024 else
8025 return gen_rtx_MINUS (mode, op0, op1);
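/* E.g. (illustrative) a static initializer computing &b - &a can come
   back as (minus:SI (symbol_ref:SI ("b")) (symbol_ref:SI ("a"))),
   leaving the subtraction for the assembler or linker to resolve
   instead of emitting any insns. */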
8027 case MINUS_EXPR:
8028 this_optab = ! unsignedp && flag_trapv
8029 && (GET_MODE_CLASS(mode) == MODE_INT)
8030 ? subv_optab : sub_optab;
8032 /* No sense saving up arithmetic to be done
8033 if it's all in the wrong mode to form part of an address.
8034 And force_operand won't know whether to sign-extend or
8035 zero-extend. */
8036 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8037 || mode != ptr_mode)
8038 goto binop;
8040 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8041 subtarget, &op0, &op1, modifier);
8043 /* Convert A - const to A + (-const). */
8044 if (GET_CODE (op1) == CONST_INT)
8046 op1 = negate_rtx (mode, op1);
8047 return simplify_gen_binary (PLUS, mode, op0, op1);
8049 goto binop2;
8052 case MULT_EXPR:
8053 /* If the first operand is constant, swap them.
8054 Thus the following special case checks need only
8055 check the second operand. */
8056 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8058 tree t1 = TREE_OPERAND (exp, 0);
8059 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8060 TREE_OPERAND (exp, 1) = t1;
8063 /* Attempt to return something suitable for generating an
8064 indexed address, for machines that support that. */
8066 if (modifier == EXPAND_SUM && mode == ptr_mode
8067 && host_integerp (TREE_OPERAND (exp, 1), 0))
8069 tree exp1 = TREE_OPERAND (exp, 1);
8071 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8072 EXPAND_SUM);
8074 if (!REG_P (op0))
8075 op0 = force_operand (op0, NULL_RTX);
8076 if (!REG_P (op0))
8077 op0 = copy_to_mode_reg (mode, op0);
8079 return gen_rtx_MULT (mode, op0,
8080 gen_int_mode (tree_low_cst (exp1, 0),
8081 TYPE_MODE (TREE_TYPE (exp1))));
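/* Illustrative: inside an EXPAND_SUM address computation, i * 4 returns
   (mult:SI (reg:SI n) (const_int 4)) here, so the enclosing PLUS_EXPR can
   combine it with a base address into one indexed memory operand. */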
8084 if (modifier == EXPAND_STACK_PARM)
8085 target = 0;
8087 /* Check for multiplying things that have been extended
8088 from a narrower type. If this machine supports multiplying
8089 in that narrower type with a result in the desired type,
8090 do it that way, and avoid the explicit type-conversion. */
8091 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8092 && TREE_CODE (type) == INTEGER_TYPE
8093 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8094 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8095 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8096 && int_fits_type_p (TREE_OPERAND (exp, 1),
8097 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8098 /* Don't use a widening multiply if a shift will do. */
8099 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8100 > HOST_BITS_PER_WIDE_INT)
8101 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8103 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8104 && (TYPE_PRECISION (TREE_TYPE
8105 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8106 == TYPE_PRECISION (TREE_TYPE
8108 (TREE_OPERAND (exp, 0), 0))))
8109 /* If both operands are extended, they must either both
8110 be zero-extended or both be sign-extended. */
8111 && (TYPE_UNSIGNED (TREE_TYPE
8112 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8113 == TYPE_UNSIGNED (TREE_TYPE
8115 (TREE_OPERAND (exp, 0), 0)))))))
8117 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8118 enum machine_mode innermode = TYPE_MODE (op0type);
8119 bool zextend_p = TYPE_UNSIGNED (op0type);
8120 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8121 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8123 if (mode == GET_MODE_WIDER_MODE (innermode))
8125 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8127 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8128 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8129 TREE_OPERAND (exp, 1),
8130 NULL_RTX, &op0, &op1, 0);
8132 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8133 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8134 NULL_RTX, &op0, &op1, 0);
8137 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8138 && innermode == word_mode)
8141 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8142 NULL_RTX, VOIDmode, 0);
8143 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8144 op1 = convert_modes (innermode, mode,
8145 expand_expr (TREE_OPERAND (exp, 1),
8146 NULL_RTX, VOIDmode, 0),
8149 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8150 NULL_RTX, VOIDmode, 0);
8151 temp = expand_binop (mode, other_optab, op0, op1, target,
8152 unsignedp, OPTAB_LIB_WIDEN);
8153 hipart = gen_highpart (innermode, temp);
8154 htem = expand_mult_highpart_adjust (innermode, hipart,
8158 emit_move_insn (hipart, htem);
8163 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8164 subtarget, &op0, &op1, 0);
8165 return expand_mult (mode, op0, op1, target, unsignedp);
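      /* Illustrative example (editor's addition, not in the original
	 source): assuming 32-bit int and 64-bit long long, the
	 widening-multiply detection above lets

	     long long mul (int a, int b) { return (long long) a * b; }

	 expand through smul_widen_optab to one mulsidi3 instruction, when
	 the target provides that pattern, instead of two sign extensions
	 followed by a 64-bit multiply.  */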
8167 case TRUNC_DIV_EXPR:
8168 case FLOOR_DIV_EXPR:
8170 case ROUND_DIV_EXPR:
8171 case EXACT_DIV_EXPR:
8172 if (modifier == EXPAND_STACK_PARM)
8174 /* Possible optimization: compute the dividend with EXPAND_SUM
8175 then, if the divisor is constant, optimize the case
8176 where some terms of the dividend have coefficients divisible by it. */
8177 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8178 subtarget, &op0, &op1, 0);
8179 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8182 /* Emit a/b as a*(1/b); cse may later manage to share the reciprocal,
8183 saving an expensive divide. If not, combine will rebuild the original computation. */
8185 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8186 && TREE_CODE (type) == REAL_TYPE
8187 && !real_onep (TREE_OPERAND (exp, 0)))
8188 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8189 build (RDIV_EXPR, type,
8190 build_real (type, dconst1),
8191 TREE_OPERAND (exp, 1))),
8192 target, tmode, modifier);
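	/* Illustrative example (editor's addition, not in the original
	   source): with -funsafe-math-optimizations, a loop such as

	       void scale (double *v, int n, double d)
	       {
		 int i;
		 for (i = 0; i < n; i++)
		   v[i] = v[i] / d;
	       }

	   is expanded as v[i] * (1/d), so cse can hoist the single
	   division out of the loop, leaving one multiply per iteration.  */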
8193 this_optab = sdiv_optab;
8196 case TRUNC_MOD_EXPR:
8197 case FLOOR_MOD_EXPR:
8199 case ROUND_MOD_EXPR:
8200 if (modifier == EXPAND_STACK_PARM)
8202 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8203 subtarget, &op0, &op1, 0);
8204 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8206 case FIX_ROUND_EXPR:
8207 case FIX_FLOOR_EXPR:
8209 abort (); /* Not used for C. */
8211 case FIX_TRUNC_EXPR:
8212 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8213 if (target == 0 || modifier == EXPAND_STACK_PARM)
8214 target = gen_reg_rtx (mode);
8215 expand_fix (target, op0, unsignedp);
8219 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8220 if (target == 0 || modifier == EXPAND_STACK_PARM)
8221 target = gen_reg_rtx (mode);
8222 /* expand_float can't figure out what to do if FROM has VOIDmode.
8223 So give it the correct mode. With -O, cse will optimize this. */
8224 if (GET_MODE (op0) == VOIDmode)
8225 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8227 expand_float (target, op0,
8228 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8232 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8233 if (modifier == EXPAND_STACK_PARM)
8235 temp = expand_unop (mode,
8236 ! unsignedp && flag_trapv
8237 && (GET_MODE_CLASS(mode) == MODE_INT)
8238 ? negv_optab : neg_optab, op0, target, 0);
8244 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8245 if (modifier == EXPAND_STACK_PARM)
8248 /* ABS_EXPR is not valid for complex arguments. */
8249 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8250 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8253 /* Unsigned abs is simply the operand. Testing here means we don't
8254 risk generating incorrect code below. */
8255 if (TYPE_UNSIGNED (type))
8258 return expand_abs (mode, op0, target, unsignedp,
8259 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8263 target = original_target;
8265 || modifier == EXPAND_STACK_PARM
8266 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8267 || GET_MODE (target) != mode
8269 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8270 target = gen_reg_rtx (mode);
8271 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8272 target, &op0, &op1, 0);
8274 /* First try to do it with a special MIN or MAX instruction.
8275 If that does not win, use a conditional jump to select the proper
8277 this_optab = (unsignedp
8278 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8279 : (code == MIN_EXPR ? smin_optab : smax_optab));
8281 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8286 /* At this point, a MEM target is no longer useful; we will get better
8289 if (GET_CODE (target) == MEM)
8290 target = gen_reg_rtx (mode);
8292 /* If op1 was placed in target, swap op0 and op1. */
8293 if (target != op0 && target == op1)
8301 emit_move_insn (target, op0);
8303 op0 = gen_label_rtx ();
8305 /* If this mode is an integer too wide to compare properly,
8306 compare word by word. Rely on cse to optimize constant cases. */
8307 if (GET_MODE_CLASS (mode) == MODE_INT
8308 && ! can_compare_p (GE, mode, ccp_jump))
8310 if (code == MAX_EXPR)
8311 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8314 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8319 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8320 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8322 emit_move_insn (target, op1);
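	/* Illustrative sketch (editor's addition, not in the original
	   source): when no min/max pattern exists, the fallback above
	   emits the branchy equivalent of

	       if (target >= op1)      (GE for MAX_EXPR, LE for MIN_EXPR)
		 goto done;
	       target = op1;
	     done:;

	   relying on cse to merge the two references to the operands.  */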
8327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8328 if (modifier == EXPAND_STACK_PARM)
8330 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8335 /* ??? Can optimize bitwise operations with one arg constant.
8336 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8337 and (a bitwise1 b) bitwise2 b (etc)
8338 but that is probably not worthwhile. */
8340 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8341 boolean values when we want in all cases to compute both of them. In
8342 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8343 as actual zero-or-1 values and then bitwise anding. In cases where
8344 there cannot be any side effects, better code would be made by
8345 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8346 how to recognize those cases. */
8348 case TRUTH_AND_EXPR:
8350 this_optab = and_optab;
8355 this_optab = ior_optab;
8358 case TRUTH_XOR_EXPR:
8360 this_optab = xor_optab;
8367 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8369 if (modifier == EXPAND_STACK_PARM)
8371 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8372 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8375 /* Could determine the answer when only additive constants differ. Also,
8376 the addition of one can be handled by changing the condition. */
8383 case UNORDERED_EXPR:
8391 temp = do_store_flag (exp,
8392 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8393 tmode != VOIDmode ? tmode : mode, 0);
8397 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8398 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8400 && REG_P (original_target)
8401 && (GET_MODE (original_target)
8402 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8404 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8407 /* If temp is constant, we can just compute the result. */
8408 if (GET_CODE (temp) == CONST_INT)
8410 if (INTVAL (temp) != 0)
8411 emit_move_insn (target, const1_rtx);
8413 emit_move_insn (target, const0_rtx);
8418 if (temp != original_target)
8420 enum machine_mode mode1 = GET_MODE (temp);
8421 if (mode1 == VOIDmode)
8422 mode1 = tmode != VOIDmode ? tmode : mode;
8424 temp = copy_to_mode_reg (mode1, temp);
8427 op1 = gen_label_rtx ();
8428 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8429 GET_MODE (temp), unsignedp, op1);
8430 emit_move_insn (temp, const1_rtx);
8435 /* If no set-flag instruction, must generate a conditional
8436 store into a temporary variable. Drop through
8437 and handle this like && and ||. */
8439 case TRUTH_ANDIF_EXPR:
8440 case TRUTH_ORIF_EXPR:
8443 || modifier == EXPAND_STACK_PARM
8444 || ! safe_from_p (target, exp, 1)
8445 /* Make sure we don't have a hard reg (such as function's return
8446 value) live across basic blocks, if not optimizing. */
8447 || (!optimize && REG_P (target)
8448 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8449 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8452 emit_clr_insn (target);
8454 op1 = gen_label_rtx ();
8455 jumpifnot (exp, op1);
8458 emit_0_to_1_insn (target);
8461 return ignore ? const0_rtx : target;
8463 case TRUTH_NOT_EXPR:
8464 if (modifier == EXPAND_STACK_PARM)
8466 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8467 /* The parser is careful to generate TRUTH_NOT_EXPR
8468 only with operands that are always zero or one. */
8469 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8470 target, 1, OPTAB_LIB_WIDEN);
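      /* Illustrative example (editor's addition, not in the original
	 source): because the operand is known to be 0 or 1, logical
	 negation such as

	     int not_flag (int b) { return !b; }   with b in {0, 1}

	 becomes the single instruction b ^ 1, with no compare or
	 branch.  */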
8476 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8478 return expand_expr_real (TREE_OPERAND (exp, 1),
8479 (ignore ? const0_rtx : target),
8480 VOIDmode, modifier, alt_rtl);
8482 case STATEMENT_LIST:
8484 tree_stmt_iterator iter;
8489 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8490 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8495 /* If it's void, we don't need to worry about computing a value. */
8496 if (VOID_TYPE_P (TREE_TYPE (exp)))
8498 tree pred = TREE_OPERAND (exp, 0);
8499 tree then_ = TREE_OPERAND (exp, 1);
8500 tree else_ = TREE_OPERAND (exp, 2);
8502 /* If we do not have any pending cleanups or stack_levels
8503 to restore, and at least one arm of the COND_EXPR is a
8504 GOTO_EXPR to a local label, then we can emit more efficient
8505 code by using jumpif/jumpifnot instead of the 'if' machinery. */
8507 || containing_blocks_have_cleanups_or_stack_level ())
8509 else if (TREE_CODE (then_) == GOTO_EXPR
8510 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8512 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8513 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8515 else if (TREE_CODE (else_) == GOTO_EXPR
8516 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8518 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8519 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8522 /* Just use the 'if' machinery. */
8523 expand_start_cond (pred, 0);
8524 start_cleanup_deferral ();
8525 expand_expr (then_, const0_rtx, VOIDmode, 0);
8529 /* Iterate over 'else if's instead of recursing. */
8530 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8532 expand_start_else ();
8533 if (EXPR_HAS_LOCATION (exp))
8535 emit_line_note (EXPR_LOCATION (exp));
8536 record_block_change (TREE_BLOCK (exp));
8538 expand_elseif (TREE_OPERAND (exp, 0));
8539 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8541 /* Don't emit the jump and label if there's no 'else' clause. */
8542 if (TREE_SIDE_EFFECTS (exp))
8544 expand_start_else ();
8545 expand_expr (exp, const0_rtx, VOIDmode, 0);
8547 end_cleanup_deferral ();
8552 /* If we would have a "singleton" (see below) were it not for a
8553 conversion in each arm, bring that conversion back out. */
8554 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8555 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8556 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8557 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8559 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8560 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8562 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8563 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8564 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8565 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8566 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8567 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8568 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8569 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8570 return expand_expr (build1 (NOP_EXPR, type,
8571 build (COND_EXPR, TREE_TYPE (iftrue),
8572 TREE_OPERAND (exp, 0),
8574 target, tmode, modifier);
8578 /* Note that COND_EXPRs whose type is a structure or union
8579 are required to be constructed to contain assignments of
8580 a temporary variable, so that we can evaluate them here
8581 for side effect only. If type is void, we must do likewise. */
8583 /* If an arm of the branch requires a cleanup,
8584 only that cleanup is performed. */
8587 tree binary_op = 0, unary_op = 0;
8589 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8590 convert it to our mode, if necessary. */
8591 if (integer_onep (TREE_OPERAND (exp, 1))
8592 && integer_zerop (TREE_OPERAND (exp, 2))
8593 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8597 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8602 if (modifier == EXPAND_STACK_PARM)
8604 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8605 if (GET_MODE (op0) == mode)
8609 target = gen_reg_rtx (mode);
8610 convert_move (target, op0, unsignedp);
8614 /* Check for X ? A + B : A. If we have this, we can copy A to the
8615 output and conditionally add B. Similarly for unary operations.
8616 Don't do this if X has side-effects because those side effects
8617 might affect A or B and the "?" operation is a sequence point in
8618 ANSI. (operand_equal_p tests for side effects.) */
8620 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8621 && operand_equal_p (TREE_OPERAND (exp, 2),
8622 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8623 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8624 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8625 && operand_equal_p (TREE_OPERAND (exp, 1),
8626 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8627 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8628 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8629 && operand_equal_p (TREE_OPERAND (exp, 2),
8630 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8631 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8632 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8633 && operand_equal_p (TREE_OPERAND (exp, 1),
8634 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8635 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8637 /* If we are not to produce a result, we have no target. Otherwise,
8638 if a target was specified use it; it will not be used as an
8639 intermediate target unless it is safe. If no target, use a temporary. */
8644 else if (modifier == EXPAND_STACK_PARM)
8645 temp = assign_temp (type, 0, 0, 1);
8646 else if (original_target
8647 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8648 || (singleton && REG_P (original_target)
8649 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8650 && original_target == var_rtx (singleton)))
8651 && GET_MODE (original_target) == mode
8652 #ifdef HAVE_conditional_move
8653 && (! can_conditionally_move_p (mode)
8654 || REG_P (original_target)
8655 || TREE_ADDRESSABLE (type))
8657 && (GET_CODE (original_target) != MEM
8658 || TREE_ADDRESSABLE (type)))
8659 temp = original_target;
8660 else if (TREE_ADDRESSABLE (type))
8663 temp = assign_temp (type, 0, 0, 1);
8665 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8666 do the test of X as a store-flag operation, do this as
8667 A + ((X != 0) << log C). Similarly for other simple binary
8668 operators. Only do this for C == 1 if BRANCH_COST is low. */
8669 if (temp && singleton && binary_op
8670 && (TREE_CODE (binary_op) == PLUS_EXPR
8671 || TREE_CODE (binary_op) == MINUS_EXPR
8672 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8673 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8674 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8675 : integer_onep (TREE_OPERAND (binary_op, 1)))
8676 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8680 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8681 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8682 ? addv_optab : add_optab)
8683 : TREE_CODE (binary_op) == MINUS_EXPR
8684 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8685 ? subv_optab : sub_optab)
8686 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8689 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8690 if (singleton == TREE_OPERAND (exp, 1))
8691 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8693 cond = TREE_OPERAND (exp, 0);
8695 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8697 mode, BRANCH_COST <= 1);
8699 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8700 result = expand_shift (LSHIFT_EXPR, mode, result,
8701 build_int_2 (tree_log2
8705 (safe_from_p (temp, singleton, 1)
8706 ? temp : NULL_RTX), 0);
8710 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8711 return expand_binop (mode, boptab, op1, result, temp,
8712 unsignedp, OPTAB_LIB_WIDEN);
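	      /* Illustrative example (editor's addition, not in the
		 original source): given a store-flag insn and a high
		 enough BRANCH_COST, the transformation above turns

		     int sel (int x, int a) { return x ? a + 4 : a; }

		 into the branch-free a + ((x != 0) << 2): do_store_flag
		 computes the 0/1 flag and expand_shift scales it by
		 log2 (4) == 2 before the final addition.  */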
8716 do_pending_stack_adjust ();
8718 op0 = gen_label_rtx ();
8720 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8724 /* If the target conflicts with the other operand of the
8725 binary op, we can't use it. Also, we can't use the target
8726 if it is a hard register, because evaluating the condition
8727 might clobber it. */
8729 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8731 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8732 temp = gen_reg_rtx (mode);
8733 store_expr (singleton, temp,
8734 modifier == EXPAND_STACK_PARM ? 2 : 0);
8737 expand_expr (singleton,
8738 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8739 if (singleton == TREE_OPERAND (exp, 1))
8740 jumpif (TREE_OPERAND (exp, 0), op0);
8742 jumpifnot (TREE_OPERAND (exp, 0), op0);
8744 start_cleanup_deferral ();
8745 if (binary_op && temp == 0)
8746 /* Just touch the other operand. */
8747 expand_expr (TREE_OPERAND (binary_op, 1),
8748 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8750 store_expr (build (TREE_CODE (binary_op), type,
8751 make_tree (type, temp),
8752 TREE_OPERAND (binary_op, 1)),
8753 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8755 store_expr (build1 (TREE_CODE (unary_op), type,
8756 make_tree (type, temp)),
8757 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8760 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8761 comparison operator. If we have one of these cases, set the
8762 output to A, branch on A (cse will merge these two references),
8763 then set the output to FOO. */
8765 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8766 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8767 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8768 TREE_OPERAND (exp, 1), 0)
8769 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8770 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8771 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8774 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8775 temp = gen_reg_rtx (mode);
8776 store_expr (TREE_OPERAND (exp, 1), temp,
8777 modifier == EXPAND_STACK_PARM ? 2 : 0);
8778 jumpif (TREE_OPERAND (exp, 0), op0);
8780 start_cleanup_deferral ();
8781 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8782 store_expr (TREE_OPERAND (exp, 2), temp,
8783 modifier == EXPAND_STACK_PARM ? 2 : 0);
8785 expand_expr (TREE_OPERAND (exp, 2),
8786 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8790 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8791 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8792 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8793 TREE_OPERAND (exp, 2), 0)
8794 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8795 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8796 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8799 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8800 temp = gen_reg_rtx (mode);
8801 store_expr (TREE_OPERAND (exp, 2), temp,
8802 modifier == EXPAND_STACK_PARM ? 2 : 0);
8803 jumpifnot (TREE_OPERAND (exp, 0), op0);
8805 start_cleanup_deferral ();
8806 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8807 store_expr (TREE_OPERAND (exp, 1), temp,
8808 modifier == EXPAND_STACK_PARM ? 2 : 0);
8810 expand_expr (TREE_OPERAND (exp, 1),
8811 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8816 op1 = gen_label_rtx ();
8817 jumpifnot (TREE_OPERAND (exp, 0), op0);
8819 start_cleanup_deferral ();
8821 /* One branch of the cond can be void, if it never returns. For
8822 example A ? throw : E. */
8824 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8825 store_expr (TREE_OPERAND (exp, 1), temp,
8826 modifier == EXPAND_STACK_PARM ? 2 : 0);
8828 expand_expr (TREE_OPERAND (exp, 1),
8829 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8830 end_cleanup_deferral ();
8832 emit_jump_insn (gen_jump (op1));
8835 start_cleanup_deferral ();
8837 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8838 store_expr (TREE_OPERAND (exp, 2), temp,
8839 modifier == EXPAND_STACK_PARM ? 2 : 0);
8841 expand_expr (TREE_OPERAND (exp, 2),
8842 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8845 end_cleanup_deferral ();
8856 /* Something needs to be initialized, but we didn't know
8857 where that thing was when building the tree. For example,
8858 it could be the return value of a function, or a parameter
8859 to a function which lays down in the stack, or a temporary
8860 variable which must be passed by reference.
8862 We guarantee that the expression will either be constructed
8863 or copied into our original target. */
8865 tree slot = TREE_OPERAND (exp, 0);
8866 tree cleanups = NULL_TREE;
8869 if (TREE_CODE (slot) != VAR_DECL)
8873 target = original_target;
8875 /* Set this here so that if we get a target that refers to a
8876 register variable that's already been used, put_reg_into_stack
8877 knows that it should fix up those uses. */
8878 TREE_USED (slot) = 1;
8882 if (DECL_RTL_SET_P (slot))
8884 target = DECL_RTL (slot);
8885 /* We have already expanded the slot, so don't do anything else. */
8887 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8892 target = assign_temp (type, 2, 0, 1);
8893 SET_DECL_RTL (slot, target);
8894 if (TREE_ADDRESSABLE (slot))
8895 put_var_into_stack (slot, /*rescan=*/false);
8897 /* Since SLOT is not known to the called function
8898 to belong to its stack frame, we must build an explicit
8899 cleanup. This case occurs when we must build up a reference
8900 to pass the reference as an argument. In this case,
8901 it is very likely that such a reference need not be built here. */
8904 if (TREE_OPERAND (exp, 2) == 0)
8905 TREE_OPERAND (exp, 2)
8906 = lang_hooks.maybe_build_cleanup (slot);
8907 cleanups = TREE_OPERAND (exp, 2);
8912 /* This case does occur, when expanding a parameter which
8913 needs to be constructed on the stack. The target
8914 is the actual stack address that we want to initialize.
8915 The function we call will perform the cleanup in this case. */
8917 /* If we have already assigned it space, use that space,
8918 not target that we were passed in, as our target
8919 parameter is only a hint. */
8920 if (DECL_RTL_SET_P (slot))
8922 target = DECL_RTL (slot);
8923 /* We have already expanded the slot, so don't do anything else. */
8925 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8930 SET_DECL_RTL (slot, target);
8931 /* If we must have an addressable slot, then make sure that
8932 the RTL that we just stored in slot is OK. */
8933 if (TREE_ADDRESSABLE (slot))
8934 put_var_into_stack (slot, /*rescan=*/true);
8938 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8939 /* Mark it as expanded. */
8940 TREE_OPERAND (exp, 1) = NULL_TREE;
8942 if (VOID_TYPE_P (TREE_TYPE (exp1)))
8943 /* If the initializer is void, just expand it; it will initialize
8944 the object directly. */
8945 expand_expr (exp1, const0_rtx, VOIDmode, 0);
8947 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8949 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8956 tree lhs = TREE_OPERAND (exp, 0);
8957 tree rhs = TREE_OPERAND (exp, 1);
8959 temp = expand_assignment (lhs, rhs, ! ignore);
8965 /* If lhs is complex, expand calls in rhs before computing it.
8966 That's so we don't compute a pointer and save it over a
8967 call. If lhs is simple, compute it first so we can give it
8968 as a target if the rhs is just a call. This avoids an
8969 extra temp and copy, and prevents a partial subsumption
8970 which makes bad code. Actually we could treat
8971 component_ref's of vars like vars. */
8973 tree lhs = TREE_OPERAND (exp, 0);
8974 tree rhs = TREE_OPERAND (exp, 1);
8978 /* Check for |= or &= of a bitfield of size one into another bitfield
8979 of size 1. In this case, (unless we need the result of the
8980 assignment) we can do this more efficiently with a
8981 test followed by an assignment, if necessary.
8983 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8984 things change so we do, this code should be enhanced to support it. */
8987 && TREE_CODE (lhs) == COMPONENT_REF
8988 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8989 || TREE_CODE (rhs) == BIT_AND_EXPR)
8990 && TREE_OPERAND (rhs, 0) == lhs
8991 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8992 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8993 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8995 rtx label = gen_label_rtx ();
8997 do_jump (TREE_OPERAND (rhs, 1),
8998 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8999 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9000 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9001 (TREE_CODE (rhs) == BIT_IOR_EXPR
9003 : integer_zero_node)),
9005 do_pending_stack_adjust ();
9010 temp = expand_assignment (lhs, rhs, ! ignore);
9016 if (!TREE_OPERAND (exp, 0))
9017 expand_null_return ();
9019 expand_return (TREE_OPERAND (exp, 0));
9022 case PREINCREMENT_EXPR:
9023 case PREDECREMENT_EXPR:
9024 return expand_increment (exp, 0, ignore);
9026 case POSTINCREMENT_EXPR:
9027 case POSTDECREMENT_EXPR:
9028 /* Faster to treat as pre-increment if result is not used. */
9029 return expand_increment (exp, ! ignore, ignore);
9032 if (modifier == EXPAND_STACK_PARM)
9034 /* If we are taking the address of something erroneous, just return a zero. */
9036 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9038 /* If we are taking the address of a constant and are at the
9039 top level, we have to use output_constant_def since we can't
9040 call force_const_mem at top level. */
9042 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9043 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9045 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9048 /* We make sure to pass const0_rtx down if we came in with
9049 ignore set, to avoid doing the cleanups twice for something. */
9050 op0 = expand_expr (TREE_OPERAND (exp, 0),
9051 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9052 (modifier == EXPAND_INITIALIZER
9053 ? modifier : EXPAND_CONST_ADDRESS));
9055 /* If we are going to ignore the result, OP0 will have been set
9056 to const0_rtx, so just return it. Don't get confused and
9057 think we are taking the address of the constant. */
9061 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9062 clever and return a REG when given a MEM. */
9063 op0 = protect_from_queue (op0, 1);
9065 /* We would like the object in memory. If it is a constant, we can
9066 have it be statically allocated into memory. For a non-constant,
9067 we need to allocate some memory and store the value into it. */
9069 if (CONSTANT_P (op0))
9070 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9072 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
9073 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9074 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9076 /* If the operand is a SAVE_EXPR, we can deal with this by
9077 forcing the SAVE_EXPR into memory. */
9078 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9080 put_var_into_stack (TREE_OPERAND (exp, 0),
9082 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9086 /* If this object is in a register, it can't be BLKmode. */
9087 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9088 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9090 if (GET_CODE (op0) == PARALLEL)
9091 /* Handle calls that pass values in multiple
9092 non-contiguous locations. The Irix 6 ABI has examples
9094 emit_group_store (memloc, op0, inner_type,
9095 int_size_in_bytes (inner_type));
9097 emit_move_insn (memloc, op0);
9103 if (GET_CODE (op0) != MEM)
9106 mark_temp_addr_taken (op0);
9107 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9109 op0 = XEXP (op0, 0);
9110 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9111 op0 = convert_memory_address (ptr_mode, op0);
9115 /* If OP0 is not aligned as least as much as the type requires, we
9116 need to make a temporary, copy OP0 to it, and take the address of
9117 the temporary. We want to use the alignment of the type, not of
9118 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9119 the test for BLKmode means that can't happen. The test for
9120 BLKmode is because we never make mis-aligned MEMs with non-BLKmode modes.
9123 We don't need to do this at all if the machine doesn't have
9124 strict alignment. */
9125 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9126 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9128 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9130 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9133 if (TYPE_ALIGN_OK (inner_type))
9136 if (TREE_ADDRESSABLE (inner_type))
9138 /* We can't make a bitwise copy of this object, so fail. */
9139 error ("cannot take the address of an unaligned member");
9143 new = assign_stack_temp_for_type
9144 (TYPE_MODE (inner_type),
9145 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9146 : int_size_in_bytes (inner_type),
9147 1, build_qualified_type (inner_type,
9148 (TYPE_QUALS (inner_type)
9149 | TYPE_QUAL_CONST)));
9151 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9152 (modifier == EXPAND_STACK_PARM
9153 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9158 op0 = force_operand (XEXP (op0, 0), target);
9163 && modifier != EXPAND_CONST_ADDRESS
9164 && modifier != EXPAND_INITIALIZER
9165 && modifier != EXPAND_SUM)
9166 op0 = force_reg (Pmode, op0);
9169 && ! REG_USERVAR_P (op0))
9170 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9172 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9173 op0 = convert_memory_address (ptr_mode, op0);
9177 case ENTRY_VALUE_EXPR:
9180 /* COMPLEX type for Extended Pascal & Fortran */
9183 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9186 /* Get the rtx code of the operands. */
9187 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9188 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9191 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9195 /* Move the real (op0) and imaginary (op1) parts to their location. */
9196 emit_move_insn (gen_realpart (mode, target), op0);
9197 emit_move_insn (gen_imagpart (mode, target), op1);
9199 insns = get_insns ();
9202 /* Complex construction should appear as a single unit. */
9203 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9204 each with a separate pseudo as destination.
9205 It's not correct for flow to treat them as a unit. */
9206 if (GET_CODE (target) != CONCAT)
9207 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9215 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9216 return gen_realpart (mode, op0);
9219 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9220 return gen_imagpart (mode, op0);
9224 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9228 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9231 target = gen_reg_rtx (mode);
9235 /* Store the realpart and the negated imagpart to target. */
9236 emit_move_insn (gen_realpart (partmode, target),
9237 gen_realpart (partmode, op0));
9239 imag_t = gen_imagpart (partmode, target);
9240 temp = expand_unop (partmode,
9241 ! unsignedp && flag_trapv
9242 && (GET_MODE_CLASS(partmode) == MODE_INT)
9243 ? negv_optab : neg_optab,
9244 gen_imagpart (partmode, op0), imag_t, 0);
9246 emit_move_insn (imag_t, temp);
9248 insns = get_insns ();
9251 /* Conjugate should appear as a single unit.
9252 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9253 each with a separate pseudo as destination.
9254 It's not correct for flow to treat them as a unit. */
9255 if (GET_CODE (target) != CONCAT)
9256 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9264 expand_resx_expr (exp);
9267 case TRY_CATCH_EXPR:
9269 tree handler = TREE_OPERAND (exp, 1);
9271 expand_eh_region_start ();
9272 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9273 expand_eh_handler (handler);
9279 expand_start_catch (CATCH_TYPES (exp));
9280 expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
9281 expand_end_catch ();
9284 case EH_FILTER_EXPR:
9285 /* Should have been handled in expand_eh_handler. */
9288 case TRY_FINALLY_EXPR:
9290 tree try_block = TREE_OPERAND (exp, 0);
9291 tree finally_block = TREE_OPERAND (exp, 1);
9293 if ((!optimize && lang_protect_cleanup_actions == NULL)
9294 || unsafe_for_reeval (finally_block) > 1)
9296 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9297 is not sufficient, so we cannot expand the block twice.
9298 So we play games with GOTO_SUBROUTINE_EXPR to let us
9299 expand the thing only once. */
9300 /* When not optimizing, we go ahead with this form since
9301 (1) user breakpoints operate more predictably without
9302 code duplication, and
9303 (2) we're not running any of the global optimizers
9304 that would explode in time/space with the highly
9305 connected CFG created by the indirect branching. */
9307 rtx finally_label = gen_label_rtx ();
9308 rtx done_label = gen_label_rtx ();
9309 rtx return_link = gen_reg_rtx (Pmode);
9310 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9311 (tree) finally_label, (tree) return_link);
9312 TREE_SIDE_EFFECTS (cleanup) = 1;
9314 /* Start a new binding layer that will keep track of all cleanup
9315 actions to be performed. */
9316 expand_start_bindings (2);
9317 target_temp_slot_level = temp_slot_level;
9319 expand_decl_cleanup (NULL_TREE, cleanup);
9320 op0 = expand_expr (try_block, target, tmode, modifier);
9322 preserve_temp_slots (op0);
9323 expand_end_bindings (NULL_TREE, 0, 0);
9324 emit_jump (done_label);
9325 emit_label (finally_label);
9326 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9327 emit_indirect_jump (return_link);
9328 emit_label (done_label);
9332 expand_start_bindings (2);
9333 target_temp_slot_level = temp_slot_level;
9335 expand_decl_cleanup (NULL_TREE, finally_block);
9336 op0 = expand_expr (try_block, target, tmode, modifier);
9338 preserve_temp_slots (op0);
9339 expand_end_bindings (NULL_TREE, 0, 0);
9345 case GOTO_SUBROUTINE_EXPR:
9347 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9348 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9349 rtx return_address = gen_label_rtx ();
9350 emit_move_insn (return_link,
9351 gen_rtx_LABEL_REF (Pmode, return_address));
9353 emit_label (return_address);
9358 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9361 return get_exception_pointer (cfun);
9364 return get_exception_filter (cfun);
9367 /* Function descriptors are not valid except as
9368 initialization constants, and should not be expanded. */
9372 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
9374 if (SWITCH_BODY (exp))
9375 expand_expr_stmt (SWITCH_BODY (exp));
9376 if (SWITCH_LABELS (exp))
9379 tree vec = SWITCH_LABELS (exp);
9380 size_t i, n = TREE_VEC_LENGTH (vec);
9382 for (i = 0; i < n; ++i)
9384 tree elt = TREE_VEC_ELT (vec, i);
9385 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
9386 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
9387 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
9389 tree case_low = CASE_LOW (elt);
9390 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
9391 if (case_low && case_high)
9393 /* Case label is less than minimum for type. */
9394 if ((tree_int_cst_compare (case_low, min_value) < 0)
9395 && (tree_int_cst_compare (case_high, min_value) < 0))
9397 warning ("case label value is less than minimum value for type");
9402 /* Case value is greater than maximum for type. */
9403 if ((tree_int_cst_compare (case_low, max_value) > 0)
9404 && (tree_int_cst_compare (case_high, max_value) > 0))
9406 warning ("case label value exceeds maximum value for type");
9411 /* Saturate lower case label value to minimum. */
9412 if ((tree_int_cst_compare (case_high, min_value) >= 0)
9413 && (tree_int_cst_compare (case_low, min_value) < 0))
9415 warning ("lower value in case label range less than minimum value for type");
9417 case_low = min_value;
9420 /* Saturate upper case label value to maximum. */
9421 if ((tree_int_cst_compare (case_low, max_value) <= 0)
9422 && (tree_int_cst_compare (case_high, max_value) > 0))
9424 warning ("upper value in case label range exceeds maximum value for type");
9426 case_high = max_value;
9430 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
9435 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
9439 expand_label (TREE_OPERAND (exp, 0));
9442 case CASE_LABEL_EXPR:
9445 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
9453 expand_asm_expr (exp);
9457 return lang_hooks.expand_expr (exp, original_target, tmode,
9461 /* Here to do an ordinary binary operator, generating an instruction
9462 from the optab already placed in `this_optab'. */
9464 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9465 subtarget, &op0, &op1, 0);
9467 if (modifier == EXPAND_STACK_PARM)
9469 temp = expand_binop (mode, this_optab, op0, op1, target,
9470 unsignedp, OPTAB_LIB_WIDEN);
9476 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9477 when applied to the address of EXP produces an address known to be
9478 aligned more than BIGGEST_ALIGNMENT. */
9481 is_aligning_offset (tree offset, tree exp)
9483 /* Strip off any conversions. */
9484 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9485 || TREE_CODE (offset) == NOP_EXPR
9486 || TREE_CODE (offset) == CONVERT_EXPR)
9487 offset = TREE_OPERAND (offset, 0);
9489 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9490 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9491 if (TREE_CODE (offset) != BIT_AND_EXPR
9492 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9493 || compare_tree_int (TREE_OPERAND (offset, 1),
9494 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9495 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9498 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9499 It must be NEGATE_EXPR. Then strip any more conversions. */
9500 offset = TREE_OPERAND (offset, 0);
9501 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9502 || TREE_CODE (offset) == NOP_EXPR
9503 || TREE_CODE (offset) == CONVERT_EXPR)
9504 offset = TREE_OPERAND (offset, 0);
9506 if (TREE_CODE (offset) != NEGATE_EXPR)
9509 offset = TREE_OPERAND (offset, 0);
9510 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9511 || TREE_CODE (offset) == NOP_EXPR
9512 || TREE_CODE (offset) == CONVERT_EXPR)
9513 offset = TREE_OPERAND (offset, 0);
9515 /* This must now be the address of EXP. */
9516 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
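/* Illustrative example (editor's addition, not in the original source):
   the pattern recognized above is the usual align-upward idiom,

       offset = (- (long) &exp) & (ALIGN - 1);

   with ALIGN a power of 2 exceeding BIGGEST_ALIGNMENT, so the address
   of EXP plus OFFSET is known to be ALIGN-aligned.  */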
9519 /* Return the tree node if ARG corresponds to a string constant, or zero
9520 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9521 in bytes within the string that ARG is accessing. The type of the
9522 offset will be `sizetype'. */
9525 string_constant (tree arg, tree *ptr_offset)
9529 if (TREE_CODE (arg) == ADDR_EXPR
9530 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9532 *ptr_offset = size_zero_node;
9533 return TREE_OPERAND (arg, 0);
9535 if (TREE_CODE (arg) == ADDR_EXPR
9536 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9537 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9539 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9540 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9542 else if (TREE_CODE (arg) == PLUS_EXPR)
9544 tree arg0 = TREE_OPERAND (arg, 0);
9545 tree arg1 = TREE_OPERAND (arg, 1);
9550 if (TREE_CODE (arg0) == ADDR_EXPR
9551 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9553 *ptr_offset = convert (sizetype, arg1);
9554 return TREE_OPERAND (arg0, 0);
9556 else if (TREE_CODE (arg1) == ADDR_EXPR
9557 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9559 *ptr_offset = convert (sizetype, arg0);
9560 return TREE_OPERAND (arg1, 0);
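/* Illustrative example (editor's addition, not in the original source):
   the three forms handled above all name a character of the same string,

       "hello"          ADDR_EXPR case, offset 0
       &"hello"[2]      ARRAY_REF case, offset 2
       "hello" + 2      PLUS_EXPR case, offset 2

   which is what lets builtins such as strlen fold accesses into the
   constant.  */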
9567 /* Expand code for a post- or pre- increment or decrement
9568 and return the RTX for the result.
9569 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9572 expand_increment (tree exp, int post, int ignore)
9576 tree incremented = TREE_OPERAND (exp, 0);
9577 optab this_optab = add_optab;
9579 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9580 int op0_is_copy = 0;
9581 int single_insn = 0;
9582 /* 1 means we can't store into OP0 directly,
9583 because it is a subreg narrower than a word,
9584 and we don't dare clobber the rest of the word. */
9587 /* Stabilize any component ref that might need to be
9588 evaluated more than once below. */
9590 || TREE_CODE (incremented) == BIT_FIELD_REF
9591 || (TREE_CODE (incremented) == COMPONENT_REF
9592 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9593 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9594 incremented = stabilize_reference (incremented);
9595 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9596 ones into save exprs so that they don't accidentally get evaluated
9597 more than once by the code below. */
9598 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9599 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9600 incremented = save_expr (incremented);
9602 /* Compute the operands as RTX.
9603 Note whether OP0 is the actual lvalue or a copy of it:
9604 I believe it is a copy iff it is a register or subreg
9605 and insns were generated in computing it. */
9607 temp = get_last_insn ();
9608 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9610 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9611 in place but instead must do sign- or zero-extension during assignment,
9612 so we copy it into a new register and let the code below use it as
9615 Note that we can safely modify this SUBREG since it is known not to be
9616 shared (it was made by the expand_expr call above). */
9618 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9621 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9625 else if (GET_CODE (op0) == SUBREG
9626 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9628 /* We cannot increment this SUBREG in place. If we are
9629 post-incrementing, get a copy of the old value. Otherwise,
9630 just mark that we cannot increment in place. */
9632 op0 = copy_to_reg (op0);
9637 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
9638 && temp != get_last_insn ());
9639 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9641 /* Decide whether incrementing or decrementing. */
9642 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9643 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9644 this_optab = sub_optab;
9646 /* Convert decrement by a constant into a negative increment. */
9647 if (this_optab == sub_optab
9648 && GET_CODE (op1) == CONST_INT)
9650 op1 = GEN_INT (-INTVAL (op1));
9651 this_optab = add_optab;
9654 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9655 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9657 /* For a preincrement, see if we can do this with a single instruction. */
9660 icode = (int) this_optab->handlers[(int) mode].insn_code;
9661 if (icode != (int) CODE_FOR_nothing
9662 /* Make sure that OP0 is valid for operands 0 and 1
9663 of the insn we want to queue. */
9664 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9665 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9666 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9670 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9671 then we cannot just increment OP0. We must therefore contrive to
9672 increment the original value. Then, for postincrement, we can return
9673 OP0 since it is a copy of the old value. For preincrement, expand here
9674 unless we can do it with a single insn.
9676 Likewise if storing directly into OP0 would clobber high bits
9677 we need to preserve (bad_subreg). */
9678 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9680 /* This is the easiest way to increment the value wherever it is.
9681 Problems with multiple evaluation of INCREMENTED are prevented
9682 because either (1) it is a component_ref or preincrement,
9683 in which case it was stabilized above, or (2) it is an array_ref
9684 with constant index in an array in a register, which is
9685 safe to reevaluate. */
9686 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9687 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9688 ? MINUS_EXPR : PLUS_EXPR),
9691 TREE_OPERAND (exp, 1));
9693 while (TREE_CODE (incremented) == NOP_EXPR
9694 || TREE_CODE (incremented) == CONVERT_EXPR)
9696 newexp = convert (TREE_TYPE (incremented), newexp);
9697 incremented = TREE_OPERAND (incremented, 0);
9700 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9701 return post ? op0 : temp;
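      /* Illustrative example (editor's addition, not in the original
	 source): this path simply rebuilds the increment as an ordinary
	 assignment, so a pre-increment

	     ++x;    is expanded as    x = x + 1;

	 (MINUS_EXPR for decrements) and handed to expand_assignment like
	 any other store.  */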
9706 /* We have a true reference to the value in OP0.
9707 If there is an insn to add or subtract in this mode, queue it.
9708 Queuing the increment insn avoids the register shuffling
9709 that often results if we must increment now and first save
9710 the old value for subsequent use. */
9712 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9713 op0 = stabilize (op0);
9716 icode = (int) this_optab->handlers[(int) mode].insn_code;
9717 if (icode != (int) CODE_FOR_nothing
9718 /* Make sure that OP0 is valid for operands 0 and 1
9719 of the insn we want to queue. */
9720 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9721 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9723 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9724 op1 = force_reg (mode, op1);
9726 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9728 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9730 rtx addr = (general_operand (XEXP (op0, 0), mode)
9731 ? force_reg (Pmode, XEXP (op0, 0))
9732 : copy_to_reg (XEXP (op0, 0)));
9735 op0 = replace_equiv_address (op0, addr);
9736 temp = force_reg (GET_MODE (op0), op0);
9737 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9738 op1 = force_reg (mode, op1);
9740 /* The increment queue is LIFO, thus we have to `queue'
9741 the instructions in reverse order. */
9742 enqueue_insn (op0, gen_move_insn (op0, temp));
9743 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9748 /* Preincrement, or we can't increment with one simple insn. */
9750 /* Save a copy of the value before inc or dec, to return it later. */
9751 temp = value = copy_to_reg (op0);
9753 /* Arrange to return the incremented value. */
9754 /* Copy the rtx because expand_binop will protect from the queue,
9755 and the results of that would be invalid for us to return
9756 if our caller does emit_queue before using our result. */
9757 temp = copy_rtx (value = op0);
9759 /* Increment however we can. */
9760 op1 = expand_binop (mode, this_optab, value, op1, op0,
9761 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9763 /* Make sure the value is stored into OP0. */
9765 emit_move_insn (op0, op1);
9770 /* Generate code to calculate EXP using a store-flag instruction
9771 and return an rtx for the result. EXP is either a comparison
9772 or a TRUTH_NOT_EXPR whose operand is a comparison.
9774 If TARGET is nonzero, store the result there if convenient.
9776 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9779 Return zero if there is no suitable set-flag instruction
9780 available on this machine.
9782 Once expand_expr has been called on the arguments of the comparison,
9783 we are committed to doing the store flag, since it is not safe to
9784 re-evaluate the expression. We emit the store-flag insn by calling
9785 emit_store_flag, but only expand the arguments if we have a reason
9786 to believe that emit_store_flag will be successful. If we think that
9787 it will, but it isn't, we have to simulate the store-flag with a
9788 set/jump/set sequence. */
9791 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9794 tree arg0, arg1, type;
9796 enum machine_mode operand_mode;
9800 enum insn_code icode;
9801 rtx subtarget = target;
9804 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9805 result at the end. We can't simply invert the test since it would
9806 have already been inverted if it were valid. This case occurs for
9807 some floating-point comparisons. */
9809 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9810 invert = 1, exp = TREE_OPERAND (exp, 0);
9812 arg0 = TREE_OPERAND (exp, 0);
9813 arg1 = TREE_OPERAND (exp, 1);
9815 /* Don't crash if the comparison was erroneous. */
9816 if (arg0 == error_mark_node || arg1 == error_mark_node)
9819 type = TREE_TYPE (arg0);
9820 operand_mode = TYPE_MODE (type);
9821 unsignedp = TYPE_UNSIGNED (type);
9823 /* We won't bother with BLKmode store-flag operations because it would mean
9824 passing a lot of information to emit_store_flag. */
9825 if (operand_mode == BLKmode)
9828 /* We won't bother with store-flag operations involving function pointers
9829 when function pointers must be canonicalized before comparisons. */
9830 #ifdef HAVE_canonicalize_funcptr_for_compare
9831 if (HAVE_canonicalize_funcptr_for_compare
9832 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9833 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9835 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9836 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9837 == FUNCTION_TYPE))))
9844 /* Get the rtx comparison code to use. We know that EXP is a comparison
9845 operation of some type. Some comparisons against 1 and -1 can be
9846 converted to comparisons with zero. Do so here so that the tests
9847 below will be aware that we have a comparison with zero. These
9848 tests will not catch constants in the first operand, but constants
9849 are rarely passed as the first operand. */
9851 switch (TREE_CODE (exp))
9860 if (integer_onep (arg1))
9861 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9863 code = unsignedp ? LTU : LT;
9866 if (! unsignedp && integer_all_onesp (arg1))
9867 arg1 = integer_zero_node, code = LT;
9869 code = unsignedp ? LEU : LE;
9872 if (! unsignedp && integer_all_onesp (arg1))
9873 arg1 = integer_zero_node, code = GE;
9875 code = unsignedp ? GTU : GT;
9878 if (integer_onep (arg1))
9879 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9881 code = unsignedp ? GEU : GE;
9884 case UNORDERED_EXPR:
9913 /* Put a constant second. */
9914 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9916 tem = arg0; arg0 = arg1; arg1 = tem;
9917 code = swap_condition (code);
9920 /* If this is an equality or inequality test of a single bit, we can
9921 do this by shifting the bit being tested to the low-order bit and
9922 masking the result with the constant 1. If the condition was EQ,
9923 we xor it with 1. This does not require an scc insn and is faster
9924 than an scc insn even if we have it.
9926 The code to make this transformation was moved into fold_single_bit_test,
9927 so we just call into the folder and expand its result. */
9929 if ((code == NE || code == EQ)
9930 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9931 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9933 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9934 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9936 target, VOIDmode, EXPAND_NORMAL);
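      /* Illustrative example (editor's addition, not in the original
	 source): fold_single_bit_test rewrites a single-bit test such as

	     int bit3 (int x) { return (x & 8) != 0; }

	 into ((x >> 3) & 1), which needs no store-flag (scc) insn and is
	 usually cheaper even when one exists.  */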
9939 /* Now see if we are likely to be able to do this. Return if not. */
9940 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9943 icode = setcc_gen_code[(int) code];
9944 if (icode == CODE_FOR_nothing
9945 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9947 /* We can only do this if it is one of the special cases that
9948 can be handled without an scc insn. */
9949 if ((code == LT && integer_zerop (arg1))
9950 || (! only_cheap && code == GE && integer_zerop (arg1)))
9952 else if (BRANCH_COST >= 0
9953 && ! only_cheap && (code == NE || code == EQ)
9954 && TREE_CODE (type) != REAL_TYPE
9955 && ((abs_optab->handlers[(int) operand_mode].insn_code
9956 != CODE_FOR_nothing)
9957 || (ffs_optab->handlers[(int) operand_mode].insn_code
9958 != CODE_FOR_nothing)))
9964 if (! get_subtarget (target)
9965 || GET_MODE (subtarget) != operand_mode)
9968 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9971 target = gen_reg_rtx (mode);
9973 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9974 because, if emit_store_flag does anything, it will succeed and
9975 OP0 and OP1 will not be used subsequently. */
9977 result = emit_store_flag (target, code,
9978 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9979 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9980 operand_mode, unsignedp, 1);
9985 result = expand_binop (mode, xor_optab, result, const1_rtx,
9986 result, 0, OPTAB_LIB_WIDEN);
9990 /* If this failed, we have to do this with set/compare/jump/set code. */
9992 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9993 target = gen_reg_rtx (GET_MODE (target));
9995 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9996 result = compare_from_rtx (op0, op1, code, unsignedp,
9997 operand_mode, NULL_RTX);
9998 if (GET_CODE (result) == CONST_INT)
9999 return (((result == const0_rtx && ! invert)
10000 || (result != const0_rtx && invert))
10001 ? const0_rtx : const1_rtx);
10003 /* The code of RESULT may not match CODE if compare_from_rtx
10004 decided to swap its operands and reverse the original code.
10006 We know that compare_from_rtx returns either a CONST_INT or
10007 a new comparison code, so it is safe to just extract the
10008 code from RESULT. */
10009 code = GET_CODE (result);
10011 label = gen_label_rtx ();
10012 if (bcc_gen_fctn[(int) code] == 0)
10015 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10016 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10017 emit_label (label);
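  /* Illustrative sketch (editor's addition, not in the original source):
     the fallback above materializes the flag with an explicit branch,
     emitting for "target = (op0 CODE op1)" the sequence

	 target = 1;                  (0 when INVERT)
	 if (op0 CODE op1) goto label;
	 target = 0;                  (1 when INVERT)
       label:;

     which is the set/jump/set code mentioned in the function comment.  */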
10023 /* Stubs in case we haven't got a casesi insn. */
10024 #ifndef HAVE_casesi
10025 # define HAVE_casesi 0
10026 # define gen_casesi(a, b, c, d, e) (0)
10027 # define CODE_FOR_casesi CODE_FOR_nothing
/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */
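/* With the default above, a switch needs at least four case values to
   be worth a dispatch table when a casesi insn exists, and five when
   the bounds must be compared by hand.  */
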
unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    abort ();

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
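      /* E.g. with a DImode index, the subtraction and the LTU check
	 above are done in DImode, so any value whose high part is
	 nonzero has already branched to DEFAULT_LABEL before the
	 SImode truncation below can discard those bits.  */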
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

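  /* Each operand of the casesi pattern must satisfy the predicate the
     pattern declares for it; where one does not, fall back to copying
     the value into a fresh register of the mode the pattern expects.  */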
  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}

/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
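  /* E.g. for case values 5 ... 10, INDEX has already had 5 subtracted
     and RANGE is 5; an original index of 3 becomes (unsigned) -2, which
     compares GTU against 5 just as an index of 11 does.  */
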
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
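  /* The address built below is, in effect,
	 table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE);
     e.g. with 4-byte table entries, index 3 addresses table_label + 12.  */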
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */
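/* For example, a V4SImode VECTOR_CST holding {1, 2, 3} yields, in effect,
   (const_vector:V4SI [(const_int 1) (const_int 2) (const_int 3)
   (const_int 0)]); elements missing from the tree's element chain are
   filled in as zero by the loop at the end.  */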
static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"