/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
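/* Illustrative sketch (not part of this file): how these heuristics are
   typically consulted to pick a block-copy strategy.  The pattern below
   mirrors the logic of emit_block_move further down; it is kept under
   "#if 0" because it is an example, not compiled code.  */
#if 0
  if (GET_CODE (size) == CONST_INT
      && MOVE_BY_PIECES_P (INTVAL (size), align))
    /* Cheap enough: expand inline, one word-sized move at a time.  */
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else
    /* Otherwise prefer a movstr pattern or a memcpy libcall.  */
    retval = emit_block_move_via_libcall (x, y, size);
#endif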
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
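/* Illustrative sketch (not part of this file): how the tables built
   above are consulted later.  convert_move, below, only refers to a
   MEM in a narrower mode when direct_load says the target can load
   that mode straight from memory.  */
#if 0
  if (GET_CODE (from) == MEM
      && ! MEM_VOLATILE_P (from)
      && direct_load[(int) to_mode])
    emit_move_insn (to, gen_lowpart (to_mode, from));
#endif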
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
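/* Illustrative sketch (not part of this file): the canonical calling
   pattern, as used by convert_move and emit_block_move below.  Protect
   both operands immediately before emitting the insn that uses them;
   MODIFY is 1 only for the operand that will be stored into.  */
#if 0
  to = protect_from_queue (to, 1);	/* TO will be modified.  */
  from = protect_from_queue (from, 0);	/* FROM is only read.  */
  emit_move_insn (to, from);		/* Safe: no QUEUED rtx remain.  */
#endif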
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Retrieve a mark on the queue.  */

rtx
mark_queue (void)
{
  return pending_chain;
}

/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

void
emit_insns_enqueued_after_mark (rtx mark)
{
  rtx p;

  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))
    return;

  while ((p = pending_chain) != mark)
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
        {
        case INSN:
        case JUMP_INSN:
        case CALL_INSN:
          QUEUED_INSN (p) = body;
          emit_insn (body);
          break;

#ifdef ENABLE_CHECKING
        case SEQUENCE:
          abort ();
          break;
#endif

        default:
          QUEUED_INSN (p) = emit_insn (body);
          break;
        }

      QUEUED_BODY (p) = 0;
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  emit_insns_enqueued_after_mark (NULL_RTX);
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            {
              if (reg_overlap_mentioned_p (to, from))
                from = force_reg (from_mode, from);
              emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
            }
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
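/* Illustrative sketch (not part of this file): widening a QImode value
   to SImode with convert_move.  The pseudo names are hypothetical;
   UNSIGNEDP selects ZERO_EXTEND over SIGN_EXTEND, per the comment at
   the top of the function.  */
#if 0
  rtx byte_val = gen_reg_rtx (QImode);
  rtx word_val = gen_reg_rtx (SImode);
  convert_move (word_val, byte_val, 1);	/* 1: unsigned, zero-extends.  */
#endif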
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }
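  /* Worked example (illustrative): with 32-bit HOST_WIDE_INT, converting
     the CONST_INT -1 to an unsigned 64-bit integer mode must produce the
     pair (low = 0xffffffff, high = 0), not an all-ones constant.  The
     immed_double_const call above yields exactly that, whereas gen_lowpart
     would in effect sign-extend the high-order word.  */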
  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
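/* Worked example (illustrative; both inputs are target-dependent):
   with an 8-byte HOST_WIDE_INT and MOVE_MAX_PIECES of 16,
   STORE_MAX_PIECES is MIN (16, 2 * 8) = 16 bytes.  */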
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }

  return data.to;
}
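/* Illustrative sketch (not part of this file): a mempcpy-style copy.
   Per the comment above, ENDP == 1 makes move_by_pieces return the
   address of the byte just past the last one written.  */
#if 0
  rtx end = move_by_pieces (to, from, len, align, 1);
#endif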
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
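/* Worked example (illustrative): copying L = 11 bytes with word
   alignment on a 32-bit target walks SImode, then HImode, then QImode:
   11 = 2*4 + 1*2 + 1*1, so move_by_pieces_ninsns returns 4.  */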
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
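/* Illustrative sketch (not part of this file): a typical call, copying
   N_BYTES (a hypothetical constant) between two BLKmode MEMs outside
   of any call-argument setup.  */
#if 0
  retval = emit_block_move (dst_mem, src_mem, GEN_INT (n_bytes),
                            BLOCK_OP_NORMAL);
#endif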
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
#endif
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = save_volatile_ok;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),
                                             NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,
                                           NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;
  tree fn = block_move_fn;

  if (!fn)
    {
      init_block_move_fn (NULL);
      fn = block_move_fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
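/* The loop emitted above has this shape (pseudo-code, illustrative):

       iter = 0;
       goto cmp;
     top:
       *(x_base + iter) = *(y_base + iter);	(one QImode move)
       iter = iter + 1;
     cmp:
       if (iter < size) goto top;  */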
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
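/* Illustrative sketch (not part of this file): the shape of such a
   PARALLEL -- each element is an EXPR_LIST giving a register and its
   byte offset within the value.  Register numbers are hypothetical.

     (parallel [(expr_list (reg:DI 100) (const_int 0))
                (expr_list (reg:DI 101) (const_int 8))])  */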
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && GET_CODE (src) == REG)
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
               && XVECLEN (dst, 0) > 1)
        tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* store_bit_field always takes its value from the lsb.
             Move the fragment to the lsb if it's not already there.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      if (GET_CODE (dst) == CONCAT)
        {
          if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            dest = XEXP (dst, 0);
          else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
            {
              bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
              dest = XEXP (dst, 1);
            }
          else if (bytepos == 0 && XVECLEN (src, 0))
            {
              dest = assign_stack_temp (GET_MODE (dest),
                                        GET_MODE_SIZE (GET_MODE (dest)), 0);
              emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
                              tmps[i]);
              dst = dest;
              break;
            }
          else
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
              || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
                         mode, tmps[i], ssize);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2123 /* Generate code to copy a BLKmode object of TYPE out of a
2124 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2125 is null, a stack temporary is created. TGTBLK is returned.
2127 The purpose of this routine is to handle functions that return
2128 BLKmode structures in registers. Some machines (the PA for example)
2129 want to return all small structures in registers regardless of the
2130 structure's alignment. */
2133 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2142 tgtblk = assign_temp (build_qualified_type (type,
2144 | TYPE_QUAL_CONST)),
2146 preserve_temp_slots (tgtblk);
2149 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2150 into a new pseudo which is a full word. */
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2154 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2160 In most ABIs, the structure will be returned at the least significant end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2168 : BYTES_BIG_ENDIAN))
2170 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
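/* A standalone sketch of the computation above, assuming 8-bit bytes;
   the helper is illustrative, not a GCC internal.  It mirrors
   BITS_PER_WORD - (bytes % UNITS_PER_WORD) * BITS_PER_UNIT.  */
#if 0
#include <stdio.h>

static unsigned
padding_correction_bits (unsigned bytes, unsigned units_per_word)
{
  unsigned tail = bytes % units_per_word;
  return tail ? (units_per_word - tail) * 8 : 0;
}

int
main (void)
{
  /* A 6-byte struct returned in 4-byte words with left padding:
     skip 16 bits at the start of the source.  */
  printf ("%u\n", padding_correction_bits (6, 4));
  return 0;
}
#endif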
2172 /* Copy the structure BITSIZE bits at a time.
2174 We could probably emit more efficient code for machines which do not use
2175 strict alignment, but it doesn't seem worth the effort at the current time. */
2177 for (bitpos = 0, xbitpos = padding_correction;
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2181 /* We need a new source operand each time xbitpos is on a
2182 word boundary and when xbitpos == padding_correction
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
2185 || xbitpos == padding_correction)
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2189 /* We need a new destination operand each time bitpos is on a word boundary. */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified). */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, word_mode, word_mode,
2207 /* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
2211 use_reg (rtx *call_fusage, rtx reg)
2213 if (GET_CODE (reg) != REG
2214 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2218 = gen_rtx_EXPR_LIST (VOIDmode,
2219 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2222 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2223 starting at REGNO. All of these registers must be hard registers. */
2226 use_regs (rtx *call_fusage, int regno, int nregs)
2230 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2233 for (i = 0; i < nregs; i++)
2234 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2237 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2238 PARALLEL REGS. This is for calls that pass values in multiple
2239 non-contiguous locations. The Irix 6 ABI has examples of this. */
2242 use_group_regs (rtx *call_fusage, rtx regs)
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
2253 if (reg != 0 && GET_CODE (reg) == REG)
2254 use_reg (call_fusage, reg);
2259 /* Determine whether the LEN bytes generated by CONSTFUN can be
2260 stored to memory using several move instructions. CONSTFUNDATA is
2261 a pointer which will be passed as argument in every CONSTFUN call.
2262 ALIGN is maximum alignment we can assume. Return nonzero if a
2263 call to store_by_pieces should succeed. */
2266 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2267 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2268 void *constfundata, unsigned int align)
2270 unsigned HOST_WIDE_INT max_size, l;
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2280 if (! STORE_BY_PIECES_P (len, align))
2283 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2284 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2285 align = MOVE_MAX * BITS_PER_UNIT;
2287 /* We would first store what we can in the largest integer mode, then go to
2288 successively smaller modes. */
2291 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2296 max_size = STORE_MAX_PIECES + 1;
2297 while (max_size > 1)
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2304 if (mode == VOIDmode)
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= GET_MODE_ALIGNMENT (mode))
2311 unsigned int size = GET_MODE_SIZE (mode);
2318 cst = (*constfun) (constfundata, offset, mode);
2319 if (!LEGITIMATE_CONSTANT_P (cst))
2329 max_size = GET_MODE_SIZE (mode);
2332 /* The code above should have handled everything. */
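/* A standalone sketch of the strategy checked above, with made-up
   names: walk from the widest usable chunk down to single bytes,
   emitting stores whenever the chunk fits the remaining length and the
   alignment permits.  The real code additionally consults mov_optab
   and LEGITIMATE_CONSTANT_P for each mode.  */
#if 0
#include <stdio.h>

static void
plan_pieces (unsigned len, unsigned align, unsigned max_piece)
{
  unsigned size;
  for (size = max_piece; size > 0; size /= 2)
    if (size <= align)              /* aligned enough for this mode */
      while (len >= size)
        {
          printf ("store %u byte(s)\n", size);
          len -= size;
        }
}

int
main (void)
{
  plan_pieces (11, 4, 8);           /* 4 + 4 + 2 + 1 */
  return 0;
}
#endif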
2340 /* Generate several move instructions to store LEN bytes generated by
2341 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2342 pointer which will be passed as argument in every CONSTFUN call.
2343 ALIGN is maximum alignment we can assume.
2344 If ENDP is 0 return TO; if ENDP is 1 return memory at the end ala
2345 mempcpy; and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2349 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2350 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2351 void *constfundata, unsigned int align, int endp)
2353 struct store_by_pieces data;
2362 if (! STORE_BY_PIECES_P (len, align))
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2369 store_by_pieces_1 (&data, align);
2380 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2383 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2386 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2393 to1 = adjust_address (data.to, QImode, data.offset);
2401 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling. ALIGN is maximum alignment we can assume. */
2406 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2408 struct store_by_pieces data;
2413 data.constfun = clear_by_pieces_1;
2414 data.constfundata = NULL;
2417 store_by_pieces_1 (&data, align);
2420 /* Callback routine for clear_by_pieces.
2421 Return const0_rtx unconditionally. */
2424 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2425 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2426 enum machine_mode mode ATTRIBUTE_UNUSED)
2431 /* Subroutine of clear_by_pieces and store_by_pieces.
2432 Generate several move instructions to store LEN bytes of block TO. (A MEM
2433 rtx with BLKmode). The caller must pass TO through protect_from_queue
2434 before calling. ALIGN is maximum alignment we can assume. */
2437 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2438 unsigned int align ATTRIBUTE_UNUSED)
2440 rtx to_addr = XEXP (data->to, 0);
2441 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2442 enum machine_mode mode = VOIDmode, tmode;
2443 enum insn_code icode;
2446 data->to_addr = to_addr;
2448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2451 data->explicit_inc_to = 0;
2453 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2455 data->offset = data->len;
2457 /* If storing requires more than two move insns,
2458 copy addresses to registers (to make displacements shorter)
2459 and use post-increment if available. */
2460 if (!data->autinc_to
2461 && move_by_pieces_ninsns (data->len, align) > 2)
2463 /* Determine the main mode we'll be using. */
2464 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2465 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2466 if (GET_MODE_SIZE (tmode) < max_size)
2469 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2471 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = -1;
2476 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2477 && ! data->autinc_to)
2479 data->to_addr = copy_addr_to_reg (to_addr);
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = 1;
2484 if ( !data->autinc_to && CONSTANT_P (to_addr))
2485 data->to_addr = copy_addr_to_reg (to_addr);
2488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2490 align = MOVE_MAX * BITS_PER_UNIT;
2492 /* First store what we can in the largest integer mode, then go to
2493 successively smaller modes. */
2495 while (max_size > 1)
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2502 if (mode == VOIDmode)
2505 icode = mov_optab->handlers[(int) mode].insn_code;
2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2509 max_size = GET_MODE_SIZE (mode);
2512 /* The code above should have handled everything. */
2517 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2522 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
2525 unsigned int size = GET_MODE_SIZE (mode);
2528 while (data->len >= size)
2531 data->offset -= size;
2533 if (data->autinc_to)
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2537 to1 = adjust_address (data->to, mode, data->offset);
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2549 if (! data->reverse)
2550 data->offset += size;
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
2560 clear_storage (rtx object, rtx size)
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2577 if (size == const0_rtx)
2579 else if (GET_CODE (size) == CONST_INT
2580 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2582 else if (clear_storage_via_clrstr (object, size, align))
2585 retval = clear_storage_via_libcall (object, size);
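/* A standalone sketch of the dispatch order above; the predicates are
   stand-ins for CLEAR_BY_PIECES_P and the clrstr expander, with
   made-up thresholds.  */
#if 0
#include <stdio.h>
#include <string.h>

static int
small_enough_for_pieces (size_t n, unsigned align)
{
  return n <= 16 && align >= 4;     /* hypothetical threshold */
}

static int
try_machine_clear (void *p, size_t n)
{
  (void) p; (void) n;
  return 0;                         /* pretend no clrstr pattern */
}

static void
clear_block (unsigned char *p, size_t n, unsigned align)
{
  if (n == 0)
    ;                               /* nothing to do */
  else if (small_enough_for_pieces (n, align))
    while (n--)
      *p++ = 0;                     /* clear_by_pieces analogue */
  else if (try_machine_clear (p, n))
    ;                               /* machine pattern handled it */
  else
    memset (p, 0, n);               /* library fallback */
}

int
main (void)
{
  unsigned char buf[32] = { 1 };
  clear_block (buf, sizeof buf, 8);
  printf ("%d\n", buf[0]);          /* prints 0 */
  return 0;
}
#endif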
2591 /* A subroutine of clear_storage. Expand a clrstr pattern;
2592 return true if successful. */
2595 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
2601 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2602 enum machine_mode mode;
2604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2605 mode = GET_MODE_WIDER_MODE (mode))
2607 enum insn_code code = clrstr_optab[(int) mode];
2608 insn_operand_predicate_fn pred;
2610 if (code != CODE_FOR_nothing
2611 /* We don't need MODE to be narrower than
2612 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2613 the mode mask, as it is returned by the macro, it will
2614 definitely be less than the actual mode mask. */
2615 && ((GET_CODE (size) == CONST_INT
2616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2617 <= (GET_MODE_MASK (mode) >> 1)))
2618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2619 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2620 || (*pred) (object, BLKmode))
2621 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2622 || (*pred) (opalign, VOIDmode)))
2625 rtx last = get_last_insn ();
2628 op1 = convert_to_mode (mode, size, 1);
2629 pred = insn_data[(int) code].operand[1].predicate;
2630 if (pred != 0 && ! (*pred) (op1, mode))
2631 op1 = copy_to_mode_reg (mode, op1);
2633 pat = GEN_FCN ((int) code) (object, op1, opalign);
2640 delete_insns_since (last);
2647 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2648 Return the return value of memset, 0 otherwise. */
2651 clear_storage_via_libcall (rtx object, rtx size)
2653 tree call_expr, arg_list, fn, object_tree, size_tree;
2654 enum machine_mode size_mode;
2657 /* OBJECT or SIZE may have been passed through protect_from_queue.
2659 It is unsafe to save the value generated by protect_from_queue
2660 and reuse it later. Consider what happens if emit_queue is
2661 called before the return value from protect_from_queue is used.
2663 Expansion of the CALL_EXPR below will call emit_queue before
2664 we are finished emitting RTL for argument setup. So if we are
2665 not careful we could get the wrong value for an argument.
2667 To avoid this problem we go ahead and emit code to copy OBJECT
2668 and SIZE into new pseudos. We can then place those new pseudos
2669 into an RTL_EXPR and use them later, even after a call to
2672 Note this is not strictly needed for library calls since they
2673 do not call emit_queue before loading their arguments. However,
2674 we may need to have library calls call emit_queue in the future
2675 since failing to do so could cause problems for targets which
2676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2678 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2680 if (TARGET_MEM_FUNCTIONS)
2681 size_mode = TYPE_MODE (sizetype);
2683 size_mode = TYPE_MODE (unsigned_type_node);
2684 size = convert_to_mode (size_mode, size, 1);
2685 size = copy_to_mode_reg (size_mode, size);
2687 /* It is incorrect to use the libcall calling conventions to call
2688 memset in this context. This could be a user call to memset and
2689 the user may wish to examine the return value from memset. For
2690 targets where libcalls and normal calls have different conventions
2691 for returning pointers, we could end up generating incorrect code.
2693 For convenience, we generate the call to bzero this way as well. */
2695 object_tree = make_tree (ptr_type_node, object);
2696 if (TARGET_MEM_FUNCTIONS)
2697 size_tree = make_tree (sizetype, size);
2699 size_tree = make_tree (unsigned_type_node, size);
2701 fn = clear_storage_libcall_fn (true);
2702 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2703 if (TARGET_MEM_FUNCTIONS)
2704 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2705 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2709 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2710 call_expr, arg_list, NULL_TREE);
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2714 /* If we are initializing a readonly value, show the above call
2715 clobbered it. Otherwise, a load from it may erroneously be
2716 hoisted from a loop. */
2717 if (RTX_UNCHANGING_P (object))
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2720 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
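/* A standalone illustration of why ordinary call conventions matter
   here: user code may consume memset's return value, while POSIX bzero
   returns nothing, so only the memset form has a value to preserve.  */
#if 0
#include <stdio.h>
#include <string.h>
#include <strings.h>

int
main (void)
{
  char buf[8];
  char *p = memset (buf, 0, sizeof buf);  /* returns its 1st argument */
  bzero (buf, sizeof buf);                /* no return value */
  printf ("%d\n", p == buf);              /* prints 1 */
  return 0;
}
#endif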
2723 /* A subroutine of clear_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2727 static GTY(()) tree block_clear_fn;
2730 init_block_clear_fn (const char *asmspec)
2732 if (!block_clear_fn)
2736 if (TARGET_MEM_FUNCTIONS)
2738 fn = get_identifier ("memset");
2739 args = build_function_type_list (ptr_type_node, ptr_type_node,
2740 integer_type_node, sizetype,
2745 fn = get_identifier ("bzero");
2746 args = build_function_type_list (void_type_node, ptr_type_node,
2747 unsigned_type_node, NULL_TREE);
2750 fn = build_decl (FUNCTION_DECL, fn, args);
2751 DECL_EXTERNAL (fn) = 1;
2752 TREE_PUBLIC (fn) = 1;
2753 DECL_ARTIFICIAL (fn) = 1;
2754 TREE_NOTHROW (fn) = 1;
2756 block_clear_fn = fn;
2761 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2762 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2767 clear_storage_libcall_fn (int for_call)
2769 static bool emitted_extern;
2771 if (!block_clear_fn)
2772 init_block_clear_fn (NULL);
2774 if (for_call && !emitted_extern)
2776 emitted_extern = true;
2777 make_decl_rtl (block_clear_fn, NULL);
2778 assemble_external (block_clear_fn);
2781 return block_clear_fn;
2784 /* Generate code to copy Y into X.
2785 Both Y and X must have the same mode, except that
2786 Y can be a constant with VOIDmode.
2787 This mode cannot be BLKmode; use emit_block_move for that.
2789 Return the last instruction emitted. */
2792 emit_move_insn (rtx x, rtx y)
2794 enum machine_mode mode = GET_MODE (x);
2795 rtx y_cst = NULL_RTX;
2798 x = protect_from_queue (x, 1);
2799 y = protect_from_queue (y, 0);
2801 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2804 /* Never force constant_p_rtx to memory. */
2805 if (GET_CODE (y) == CONSTANT_P_RTX)
2807 else if (CONSTANT_P (y))
2810 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2811 && (last_insn = compress_float_constant (x, y)))
2816 if (!LEGITIMATE_CONSTANT_P (y))
2818 y = force_const_mem (mode, y);
2820 /* If the target's cannot_force_const_mem prevented the spill,
2821 assume that the target's move expanders will also take care
2822 of the non-legitimate constant. */
2828 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2830 if (GET_CODE (x) == MEM
2831 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2832 && ! push_operand (x, GET_MODE (x)))
2834 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2835 x = validize_mem (x);
2837 if (GET_CODE (y) == MEM
2838 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2840 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2841 y = validize_mem (y);
2843 if (mode == BLKmode)
2846 last_insn = emit_move_insn_1 (x, y);
2848 if (y_cst && GET_CODE (x) == REG
2849 && (set = single_set (last_insn)) != NULL_RTX
2850 && SET_DEST (set) == x
2851 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2852 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2857 /* Low level part of emit_move_insn.
2858 Called just like emit_move_insn, but assumes X and Y
2859 are basically valid. */
2862 emit_move_insn_1 (rtx x, rtx y)
2864 enum machine_mode mode = GET_MODE (x);
2865 enum machine_mode submode;
2866 enum mode_class class = GET_MODE_CLASS (mode);
2868 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2871 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2873 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2875 /* Expand complex moves by moving real part and imag part, if possible. */
2876 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2877 && BLKmode != (submode = GET_MODE_INNER (mode))
2878 && (mov_optab->handlers[(int) submode].insn_code
2879 != CODE_FOR_nothing))
2881 /* Don't split destination if it is a stack push. */
2882 int stack = push_operand (x, GET_MODE (x));
2884 #ifdef PUSH_ROUNDING
2885 /* In case we output to the stack, but the size is smaller than the
2886 machine can push exactly, we need to use move instructions. */
2888 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2889 != GET_MODE_SIZE (submode)))
2892 HOST_WIDE_INT offset1, offset2;
2894 /* Do not use anti_adjust_stack, since we don't want to update
2895 stack_pointer_delta. */
2896 temp = expand_binop (Pmode,
2897 #ifdef STACK_GROWS_DOWNWARD
2905 (GET_MODE_SIZE (GET_MODE (x)))),
2906 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2908 if (temp != stack_pointer_rtx)
2909 emit_move_insn (stack_pointer_rtx, temp);
2911 #ifdef STACK_GROWS_DOWNWARD
2913 offset2 = GET_MODE_SIZE (submode);
2915 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2916 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2917 + GET_MODE_SIZE (submode));
2920 emit_move_insn (change_address (x, submode,
2921 gen_rtx_PLUS (Pmode,
2923 GEN_INT (offset1))),
2924 gen_realpart (submode, y));
2925 emit_move_insn (change_address (x, submode,
2926 gen_rtx_PLUS (Pmode,
2928 GEN_INT (offset2))),
2929 gen_imagpart (submode, y));
2933 /* If this is a stack, push the highpart first, so it
2934 will be in the argument order.
2936 In that case, change_address is used only to convert
2937 the mode, not to change the address. */
2940 /* Note that the real part always precedes the imag part in memory
2941 regardless of machine's endianness. */
2942 #ifdef STACK_GROWS_DOWNWARD
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_imagpart (submode, y));
2945 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2946 gen_realpart (submode, y));
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_realpart (submode, y));
2950 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2951 gen_imagpart (submode, y));
2956 rtx realpart_x, realpart_y;
2957 rtx imagpart_x, imagpart_y;
2959 /* If this is a complex value with each part being smaller than a
2960 word, the usual calling sequence will likely pack the pieces into
2961 a single register. Unfortunately, SUBREG of hard registers only
2962 deals in terms of words, so we have a problem converting input
2963 arguments to the CONCAT of two registers that is used elsewhere
2964 for complex values. If this is before reload, we can copy it into
2965 memory and reload. FIXME, we should see about using extract and
2966 insert on integer registers, but complex short and complex char
2967 variables should be rarely used. */
2968 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2969 && (reload_in_progress | reload_completed) == 0)
2972 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2974 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2976 if (packed_dest_p || packed_src_p)
2978 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2979 ? MODE_FLOAT : MODE_INT);
2981 enum machine_mode reg_mode
2982 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2984 if (reg_mode != BLKmode)
2986 rtx mem = assign_stack_temp (reg_mode,
2987 GET_MODE_SIZE (mode), 0);
2988 rtx cmem = adjust_address (mem, mode, 0);
2991 = N_("function using short complex types cannot be inline");
2995 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2997 emit_move_insn_1 (cmem, y);
2998 return emit_move_insn_1 (sreg, mem);
3002 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3004 emit_move_insn_1 (mem, sreg);
3005 return emit_move_insn_1 (x, cmem);
3011 realpart_x = gen_realpart (submode, x);
3012 realpart_y = gen_realpart (submode, y);
3013 imagpart_x = gen_imagpart (submode, x);
3014 imagpart_y = gen_imagpart (submode, y);
3016 /* Show the output dies here. This is necessary for SUBREGs
3017 of pseudos since we cannot track their lifetimes correctly;
3018 hard regs shouldn't appear here except as return values.
3019 We never want to emit such a clobber after reload. */
3021 && ! (reload_in_progress || reload_completed)
3022 && (GET_CODE (realpart_x) == SUBREG
3023 || GET_CODE (imagpart_x) == SUBREG))
3024 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3026 emit_move_insn (realpart_x, realpart_y);
3027 emit_move_insn (imagpart_x, imagpart_y);
3030 return get_last_insn ();
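/* A standalone sketch of the part-by-part move above: with no native
   complex move pattern, the move is two member moves, and the real
   part always precedes the imaginary part in memory regardless of
   endianness.  Names are illustrative.  */
#if 0
#include <stdio.h>

typedef struct { float re, im; } cplx;

static void
move_complex (cplx *x, const cplx *y)
{
  x->re = y->re;                    /* gen_realpart analogue */
  x->im = y->im;                    /* gen_imagpart analogue */
}

int
main (void)
{
  cplx a, b = { 1.0f, 2.0f };
  move_complex (&a, &b);
  printf ("%g %g\n", a.re, a.im);
  return 0;
}
#endif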
3033 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3034 find a mode to do it in. If we have a movcc, use it. Otherwise,
3035 find the MODE_INT mode of the same width. */
3036 else if (GET_MODE_CLASS (mode) == MODE_CC
3037 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3039 enum insn_code insn_code;
3040 enum machine_mode tmode = VOIDmode;
3044 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3047 for (tmode = QImode; tmode != VOIDmode;
3048 tmode = GET_MODE_WIDER_MODE (tmode))
3049 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3052 if (tmode == VOIDmode)
3055 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3056 may call change_address which is not appropriate if we were
3057 called when a reload was in progress. We don't have to worry
3058 about changing the address since the size in bytes is supposed to
3059 be the same. Copy the MEM to change the mode and move any
3060 substitutions from the old MEM to the new one. */
3062 if (reload_in_progress)
3064 x = gen_lowpart_common (tmode, x1);
3065 if (x == 0 && GET_CODE (x1) == MEM)
3067 x = adjust_address_nv (x1, tmode, 0);
3068 copy_replacements (x1, x);
3071 y = gen_lowpart_common (tmode, y1);
3072 if (y == 0 && GET_CODE (y1) == MEM)
3074 y = adjust_address_nv (y1, tmode, 0);
3075 copy_replacements (y1, y);
3080 x = gen_lowpart (tmode, x);
3081 y = gen_lowpart (tmode, y);
3084 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3085 return emit_insn (GEN_FCN (insn_code) (x, y));
3088 /* Try using a move pattern for the corresponding integer mode. This is
3089 only safe when simplify_subreg can convert MODE constants into integer
3090 constants. At present, it can only do this reliably if the value
3091 fits within a HOST_WIDE_INT. */
3092 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3093 && (submode = int_mode_for_mode (mode)) != BLKmode
3094 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3095 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3096 (simplify_gen_subreg (submode, x, mode, 0),
3097 simplify_gen_subreg (submode, y, mode, 0)));
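/* A standalone analogue of the integer-mode move above: moving a float
   through the same-width integer mode is a bit-preserving copy.  In
   portable C the equivalent is memcpy-based type punning; this assumes
   a 32-bit float.  */
#if 0
#include <stdint.h>
#include <string.h>
#include <stdio.h>

static void
move_float_via_int (float *dst, const float *src)
{
  uint32_t bits;                    /* SImode stand-in for SFmode */
  memcpy (&bits, src, sizeof bits); /* simplify_gen_subreg analogue */
  memcpy (dst, &bits, sizeof bits);
}

int
main (void)
{
  float a, b = 3.5f;
  move_float_via_int (&a, &b);
  printf ("%g\n", a);               /* prints 3.5 */
  return 0;
}
#endif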
3099 /* This will handle any multi-word or full-word mode that lacks a move_insn
3100 pattern. However, you will get better code if you define such patterns,
3101 even if they must turn into multiple assembler instructions. */
3102 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3109 #ifdef PUSH_ROUNDING
3111 /* If X is a push on the stack, do the push now and replace
3112 X with a reference to the stack pointer. */
3113 if (push_operand (x, GET_MODE (x)))
3118 /* Do not use anti_adjust_stack, since we don't want to update
3119 stack_pointer_delta. */
3120 temp = expand_binop (Pmode,
3121 #ifdef STACK_GROWS_DOWNWARD
3129 (GET_MODE_SIZE (GET_MODE (x)))),
3130 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3132 if (temp != stack_pointer_rtx)
3133 emit_move_insn (stack_pointer_rtx, temp);
3135 code = GET_CODE (XEXP (x, 0));
3137 /* Just hope that small offsets off SP are OK. */
3138 if (code == POST_INC)
3139 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3140 GEN_INT (-((HOST_WIDE_INT)
3141 GET_MODE_SIZE (GET_MODE (x)))));
3142 else if (code == POST_DEC)
3143 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3144 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3146 temp = stack_pointer_rtx;
3148 x = change_address (x, VOIDmode, temp);
3152 /* If we are in reload, see if either operand is a MEM whose address
3153 is scheduled for replacement. */
3154 if (reload_in_progress && GET_CODE (x) == MEM
3155 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3156 x = replace_equiv_address_nv (x, inner);
3157 if (reload_in_progress && GET_CODE (y) == MEM
3158 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3159 y = replace_equiv_address_nv (y, inner);
3165 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3168 rtx xpart = operand_subword (x, i, 1, mode);
3169 rtx ypart = operand_subword (y, i, 1, mode);
3171 /* If we can't get a part of Y, put Y into memory if it is a
3172 constant. Otherwise, force it into a register. If we still
3173 can't get a part of Y, abort. */
3174 if (ypart == 0 && CONSTANT_P (y))
3176 y = force_const_mem (mode, y);
3177 ypart = operand_subword (y, i, 1, mode);
3179 else if (ypart == 0)
3180 ypart = operand_subword_force (y, i, mode);
3182 if (xpart == 0 || ypart == 0)
3185 need_clobber |= (GET_CODE (xpart) == SUBREG);
3187 last_insn = emit_move_insn (xpart, ypart);
3193 /* Show the output dies here. This is necessary for SUBREGs
3194 of pseudos since we cannot track their lifetimes correctly;
3195 hard regs shouldn't appear here except as return values.
3196 We never want to emit such a clobber after reload. */
3198 && ! (reload_in_progress || reload_completed)
3199 && need_clobber != 0)
3200 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
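/* A standalone sketch of the word-at-a-time fallback above, with a
   hypothetical 4-byte word: the byte count is rounded up to whole
   words, so the buffers must be padded to a word multiple, just as RTL
   modes are.  */
#if 0
#include <stdio.h>
#include <string.h>

#define WORD 4                      /* stand-in for UNITS_PER_WORD */

static void
move_multiword (unsigned char *x, const unsigned char *y, unsigned size)
{
  unsigned nwords = (size + WORD - 1) / WORD;
  unsigned i;
  for (i = 0; i < nwords; i++)      /* one move insn per subword */
    memcpy (x + i * WORD, y + i * WORD, WORD);
}

int
main (void)
{
  unsigned char src[12] = "hello world", dst[12];
  move_multiword (dst, src, sizeof src);
  printf ("%s\n", dst);
  return 0;
}
#endif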
3210 /* If Y is representable exactly in a narrower mode, and the target can
3211 perform the extension directly from constant or memory, then emit the
3212 move as an extension. */
3215 compress_float_constant (rtx x, rtx y)
3217 enum machine_mode dstmode = GET_MODE (x);
3218 enum machine_mode orig_srcmode = GET_MODE (y);
3219 enum machine_mode srcmode;
3222 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3224 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3225 srcmode != orig_srcmode;
3226 srcmode = GET_MODE_WIDER_MODE (srcmode))
3229 rtx trunc_y, last_insn;
3231 /* Skip if the target can't extend this way. */
3232 ic = can_extend_p (dstmode, srcmode, 0);
3233 if (ic == CODE_FOR_nothing)
3236 /* Skip if the narrowed value isn't exact. */
3237 if (! exact_real_truncate (srcmode, &r))
3240 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3242 if (LEGITIMATE_CONSTANT_P (trunc_y))
3244 /* Skip if the target needs extra instructions to perform the extension. */
3246 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3249 else if (float_extend_from_mem[dstmode][srcmode])
3250 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3254 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3255 last_insn = get_last_insn ();
3257 if (GET_CODE (x) == REG)
3258 set_unique_reg_note (last_insn, REG_EQUAL, y);
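/* A standalone analogue of the exactness test above: a double constant
   may be emitted as a float plus an extension only when truncating it
   loses nothing, i.e. when it survives a round trip through the
   narrower mode.  */
#if 0
#include <stdio.h>

static int
exactly_representable_as_float (double d)
{
  return (double) (float) d == d;   /* exact_real_truncate analogue */
}

int
main (void)
{
  printf ("%d %d\n",
          exactly_representable_as_float (0.5),   /* 1: exact */
          exactly_representable_as_float (0.1));  /* 0: inexact */
  return 0;
}
#endif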
3266 /* Pushing data onto the stack. */
3268 /* Push a block of length SIZE (perhaps variable)
3269 and return an rtx to address the beginning of the block.
3270 Note that it is not possible for the value returned to be a QUEUED.
3271 The value may be virtual_outgoing_args_rtx.
3273 EXTRA is the number of bytes of padding to push in addition to SIZE.
3274 BELOW nonzero means this padding comes at low addresses;
3275 otherwise, the padding comes at high addresses. */
3278 push_block (rtx size, int extra, int below)
3282 size = convert_modes (Pmode, ptr_mode, size, 1);
3283 if (CONSTANT_P (size))
3284 anti_adjust_stack (plus_constant (size, extra));
3285 else if (GET_CODE (size) == REG && extra == 0)
3286 anti_adjust_stack (size);
3289 temp = copy_to_mode_reg (Pmode, size);
3291 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3292 temp, 0, OPTAB_LIB_WIDEN);
3293 anti_adjust_stack (temp);
3296 #ifndef STACK_GROWS_DOWNWARD
3302 temp = virtual_outgoing_args_rtx;
3303 if (extra != 0 && below)
3304 temp = plus_constant (temp, extra);
3308 if (GET_CODE (size) == CONST_INT)
3309 temp = plus_constant (virtual_outgoing_args_rtx,
3310 -INTVAL (size) - (below ? 0 : extra));
3311 else if (extra != 0 && !below)
3312 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3313 negate_rtx (Pmode, plus_constant (size, extra)));
3315 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3316 negate_rtx (Pmode, size));
3319 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
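/* A standalone sketch of the address arithmetic above for a
   downward-growing stack, in bytes: the pointer drops by SIZE + EXTRA,
   and the usable block starts EXTRA bytes above that when the padding
   lies below.  */
#if 0
#include <stdio.h>

static unsigned long
push_block_addr (unsigned long sp, unsigned long size,
                 unsigned long extra, int below)
{
  sp -= size + extra;               /* anti_adjust_stack analogue */
  return below ? sp + extra : sp;
}

int
main (void)
{
  /* A 16-byte block with 8 bytes of padding below it.  */
  printf ("%#lx\n", push_block_addr (0x1000, 16, 8, 1)); /* 0xff0 */
  return 0;
}
#endif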
3322 #ifdef PUSH_ROUNDING
3324 /* Emit single push insn. */
3327 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3330 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3332 enum insn_code icode;
3333 insn_operand_predicate_fn pred;
3335 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3336 /* If there is a push pattern, use it. Otherwise try the old way of
3337 throwing a MEM representing the push operation to the move expander. */
3338 icode = push_optab->handlers[(int) mode].insn_code;
3339 if (icode != CODE_FOR_nothing)
3341 if (((pred = insn_data[(int) icode].operand[0].predicate)
3342 && !((*pred) (x, mode))))
3343 x = force_reg (mode, x);
3344 emit_insn (GEN_FCN (icode) (x));
3347 if (GET_MODE_SIZE (mode) == rounded_size)
3348 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3349 /* If we are to pad downward, adjust the stack pointer first and
3350 then store X into the stack location using an offset. This is
3351 because emit_move_insn does not know how to pad; it does not have access to type. */
3353 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3355 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3356 HOST_WIDE_INT offset;
3358 emit_move_insn (stack_pointer_rtx,
3359 expand_binop (Pmode,
3360 #ifdef STACK_GROWS_DOWNWARD
3366 GEN_INT (rounded_size),
3367 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3369 offset = (HOST_WIDE_INT) padding_size;
3370 #ifdef STACK_GROWS_DOWNWARD
3371 if (STACK_PUSH_CODE == POST_DEC)
3372 /* We have already decremented the stack pointer, so get the previous value. */
3374 offset += (HOST_WIDE_INT) rounded_size;
3376 if (STACK_PUSH_CODE == POST_INC)
3377 /* We have already incremented the stack pointer, so get the previous value. */
3379 offset -= (HOST_WIDE_INT) rounded_size;
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3385 #ifdef STACK_GROWS_DOWNWARD
3386 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3387 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3388 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3390 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3391 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3392 GEN_INT (rounded_size));
3394 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3397 dest = gen_rtx_MEM (mode, dest_addr);
3401 set_mem_attributes (dest, type, 1);
3403 if (flag_optimize_sibling_calls)
3404 /* Function incoming arguments may overlap with sibling call
3405 outgoing arguments and we cannot allow reordering of reads
3406 from function arguments with stores to outgoing arguments
3407 of sibling calls. */
3408 set_mem_alias_set (dest, 0);
3410 emit_move_insn (dest, x);
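/* A standalone sketch of the offset bookkeeping above on a
   downward-growing stack: after the pointer has dropped by the rounded
   size, a downward-padded value is stored PADDING bytes above the new
   pointer, and a POST_DEC push stores through the pre-decrement
   pointer, hence the extra ROUNDED_SIZE.  */
#if 0
#include <stdio.h>

static long
store_offset (unsigned rounded_size, unsigned mode_size, int post_dec)
{
  long offset = rounded_size - mode_size;   /* padding_size */
  if (post_dec)
    offset += rounded_size;   /* address relative to pre-decrement SP */
  return offset;
}

int
main (void)
{
  /* A 2-byte value pushed into a 4-byte slot.  */
  printf ("%ld %ld\n",
          store_offset (4, 2, 0),   /* 2 */
          store_offset (4, 2, 1));  /* 6 */
  return 0;
}
#endif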
3414 /* Generate code to push X onto the stack, assuming it has mode MODE and
3416 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3418 SIZE is an rtx for the size of data to be copied (in bytes),
3419 needed only if X is BLKmode.
3421 ALIGN (in bits) is maximum alignment we can assume.
3423 If PARTIAL and REG are both nonzero, then copy that many of the first
3424 words of X into registers starting with REG, and push the rest of X.
3425 The amount of space pushed is decreased by PARTIAL words,
3426 rounded *down* to a multiple of PARM_BOUNDARY.
3427 REG must be a hard register in this case.
3428 If REG is zero but PARTIAL is not, take all other actions for an
3429 argument partially in registers, but do not actually load any registers.
3432 EXTRA is the amount in bytes of extra space to leave next to this arg.
3433 This is ignored if an argument block has already been allocated.
3435 On a machine that lacks real push insns, ARGS_ADDR is the address of
3436 the bottom of the argument block for this call. We use indexing off there
3437 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3438 argument block has not been preallocated.
3440 ARGS_SO_FAR is the size of args previously pushed for this call.
3442 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3443 for arguments passed in registers. If nonzero, it will be the number
3444 of bytes required. */
3447 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3448 unsigned int align, int partial, rtx reg, int extra,
3449 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3453 enum direction stack_direction
3454 #ifdef STACK_GROWS_DOWNWARD
3460 /* Decide where to pad the argument: `downward' for below,
3461 `upward' for above, or `none' for don't pad it.
3462 Default is below for small data on big-endian machines; else above. */
3463 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3465 /* Invert direction if stack is post-decrement.
3467 if (STACK_PUSH_CODE == POST_DEC)
3468 if (where_pad != none)
3469 where_pad = (where_pad == downward ? upward : downward);
3471 xinner = x = protect_from_queue (x, 0);
3473 if (mode == BLKmode)
3475 /* Copy a block into the stack, entirely or partially. */
3478 int used = partial * UNITS_PER_WORD;
3482 if (reg && GET_CODE (reg) == PARALLEL)
3484 /* Use the size of the elt to compute offset. */
3485 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3486 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3487 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3490 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3497 /* USED is now the # of bytes we need not copy to the stack
3498 because registers will take care of them. */
3501 xinner = adjust_address (xinner, BLKmode, used);
3503 /* If the partial register-part of the arg counts in its stack size,
3504 skip the part of stack space corresponding to the registers.
3505 Otherwise, start copying to the beginning of the stack space,
3506 by setting SKIP to 0. */
3507 skip = (reg_parm_stack_space == 0) ? 0 : used;
3509 #ifdef PUSH_ROUNDING
3510 /* Do it with several push insns if that doesn't take lots of insns
3511 and if there is no difficulty with push insns that skip bytes
3512 on the stack for alignment purposes. */
3515 && GET_CODE (size) == CONST_INT
3517 && MEM_ALIGN (xinner) >= align
3518 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3519 /* Here we avoid the case of a structure whose weak alignment
3520 forces many pushes of a small amount of data,
3521 and such small pushes do rounding that causes trouble. */
3522 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3523 || align >= BIGGEST_ALIGNMENT
3524 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3525 == (align / BITS_PER_UNIT)))
3526 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3528 /* Push padding now if padding above and stack grows down,
3529 or if padding below and stack grows up.
3530 But if space already allocated, this has already been done. */
3531 if (extra && args_addr == 0
3532 && where_pad != none && where_pad != stack_direction)
3533 anti_adjust_stack (GEN_INT (extra));
3535 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3538 #endif /* PUSH_ROUNDING */
3542 /* Otherwise make space on the stack and copy the data
3543 to the address of that space. */
3545 /* Deduct words put into registers from the size we must copy. */
3548 if (GET_CODE (size) == CONST_INT)
3549 size = GEN_INT (INTVAL (size) - used);
3551 size = expand_binop (GET_MODE (size), sub_optab, size,
3552 GEN_INT (used), NULL_RTX, 0,
3556 /* Get the address of the stack space.
3557 In this case, we do not deal with EXTRA separately.
3558 A single stack adjust will do. */
3561 temp = push_block (size, extra, where_pad == downward);
3564 else if (GET_CODE (args_so_far) == CONST_INT)
3565 temp = memory_address (BLKmode,
3566 plus_constant (args_addr,
3567 skip + INTVAL (args_so_far)));
3569 temp = memory_address (BLKmode,
3570 plus_constant (gen_rtx_PLUS (Pmode,
3575 if (!ACCUMULATE_OUTGOING_ARGS)
3577 /* If the source is referenced relative to the stack pointer,
3578 copy it to another register to stabilize it. We do not need
3579 to do this if we know that we won't be changing sp. */
3581 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3582 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3583 temp = copy_to_reg (temp);
3586 target = gen_rtx_MEM (BLKmode, temp);
3590 set_mem_attributes (target, type, 1);
3591 /* Function incoming arguments may overlap with sibling call
3592 outgoing arguments and we cannot allow reordering of reads
3593 from function arguments with stores to outgoing arguments
3594 of sibling calls. */
3595 set_mem_alias_set (target, 0);
3598 /* ALIGN may well be larger than the alignment of TYPE, e.g. due to
3599 PARM_BOUNDARY. Assume the caller isn't lying. */
3600 set_mem_align (target, align);
3602 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3605 else if (partial > 0)
3607 /* Scalar partly in registers. */
3609 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3612 /* # words of start of argument
3613 that we must make space for but need not store. */
3614 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3615 int args_offset = INTVAL (args_so_far);
3618 /* Push padding now if padding above and stack grows down,
3619 or if padding below and stack grows up.
3620 But if space already allocated, this has already been done. */
3621 if (extra && args_addr == 0
3622 && where_pad != none && where_pad != stack_direction)
3623 anti_adjust_stack (GEN_INT (extra));
3625 /* If we make space by pushing it, we might as well push
3626 the real data. Otherwise, we can leave OFFSET nonzero
3627 and leave the space uninitialized. */
3631 /* Now NOT_STACK gets the number of words that we don't need to
3632 allocate on the stack. */
3633 not_stack = partial - offset;
3635 /* If the partial register-part of the arg counts in its stack size,
3636 skip the part of stack space corresponding to the registers.
3637 Otherwise, start copying to the beginning of the stack space,
3638 by setting SKIP to 0. */
3639 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3641 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3642 x = validize_mem (force_const_mem (mode, x));
3644 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3645 SUBREGs of such registers are not allowed. */
3646 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3647 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3648 x = copy_to_reg (x);
3650 /* Loop over all the words allocated on the stack for this arg. */
3651 /* We can do it by words, because any scalar bigger than a word
3652 has a size that is a multiple of a word. */
3653 #ifndef PUSH_ARGS_REVERSED
3654 for (i = not_stack; i < size; i++)
3656 for (i = size - 1; i >= not_stack; i--)
3658 if (i >= not_stack + offset)
3659 emit_push_insn (operand_subword_force (x, i, mode),
3660 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3662 GEN_INT (args_offset + ((i - not_stack + skip)
3664 reg_parm_stack_space, alignment_pad);
3671 /* Push padding now if padding above and stack grows down,
3672 or if padding below and stack grows up.
3673 But if space already allocated, this has already been done. */
3674 if (extra && args_addr == 0
3675 && where_pad != none && where_pad != stack_direction)
3676 anti_adjust_stack (GEN_INT (extra));
3678 #ifdef PUSH_ROUNDING
3679 if (args_addr == 0 && PUSH_ARGS)
3680 emit_single_push_insn (mode, x, type);
3684 if (GET_CODE (args_so_far) == CONST_INT)
3686 = memory_address (mode,
3687 plus_constant (args_addr,
3688 INTVAL (args_so_far)));
3690 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3692 dest = gen_rtx_MEM (mode, addr);
3695 set_mem_attributes (dest, type, 1);
3696 /* Function incoming arguments may overlap with sibling call
3697 outgoing arguments and we cannot allow reordering of reads
3698 from function arguments with stores to outgoing arguments
3699 of sibling calls. */
3700 set_mem_alias_set (dest, 0);
3703 emit_move_insn (dest, x);
3707 /* If part should go in registers, copy that part
3708 into the appropriate registers. Do this now, at the end,
3709 since mem-to-mem copies above may do function calls. */
3710 if (partial > 0 && reg != 0)
3712 /* Handle calls that pass values in multiple non-contiguous locations.
3713 The Irix 6 ABI has examples of this. */
3714 if (GET_CODE (reg) == PARALLEL)
3715 emit_group_load (reg, x, type, -1);
3717 move_block_to_reg (REGNO (reg), x, partial, mode);
3720 if (extra && args_addr == 0 && where_pad == stack_direction)
3721 anti_adjust_stack (GEN_INT (extra));
3723 if (alignment_pad && args_addr == 0)
3724 anti_adjust_stack (alignment_pad);
3727 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3731 get_subtarget (rtx x)
3734 /* Only registers can be subtargets. */
3735 || GET_CODE (x) != REG
3736 /* If the register is readonly, it can't be set more than once. */
3737 || RTX_UNCHANGING_P (x)
3738 /* Don't use hard regs to avoid extending their life. */
3739 || REGNO (x) < FIRST_PSEUDO_REGISTER
3740 /* Avoid subtargets inside loops,
3741 since they hide some invariant expressions. */
3742 || preserve_subexpressions_p ())
3746 /* Expand an assignment that stores the value of FROM into TO.
3747 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3748 (This may contain a QUEUED rtx;
3749 if the value is constant, this rtx is a constant.)
3750 Otherwise, the returned value is NULL_RTX. */
3753 expand_assignment (tree to, tree from, int want_value)
3758 /* Don't crash if the lhs of the assignment was erroneous. */
3760 if (TREE_CODE (to) == ERROR_MARK)
3762 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3763 return want_value ? result : NULL_RTX;
3766 /* Assignment of a structure component needs special treatment
3767 if the structure component's rtx is not simply a MEM.
3768 Assignment of an array element at a constant index, and assignment of
3769 an array element in an unaligned packed structure field, have the same problem. */
3772 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3773 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3774 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3776 enum machine_mode mode1;
3777 HOST_WIDE_INT bitsize, bitpos;
3785 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3786 &unsignedp, &volatilep);
3788 /* If we are going to use store_bit_field and extract_bit_field,
3789 make sure to_rtx will be safe for multiple use. */
3791 if (mode1 == VOIDmode && want_value)
3792 tem = stabilize_reference (tem);
3794 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3798 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3800 if (GET_CODE (to_rtx) != MEM)
3803 #ifdef POINTERS_EXTEND_UNSIGNED
3804 if (GET_MODE (offset_rtx) != Pmode)
3805 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3807 if (GET_MODE (offset_rtx) != ptr_mode)
3808 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3811 /* A constant address in TO_RTX can have VOIDmode; we must not try
3812 to call force_reg for that case, so avoid it. */
3813 if (GET_CODE (to_rtx) == MEM
3814 && GET_MODE (to_rtx) == BLKmode
3815 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3817 && (bitpos % bitsize) == 0
3818 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3819 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3821 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3825 to_rtx = offset_address (to_rtx, offset_rtx,
3826 highest_pow2_factor_for_target (to,
3830 if (GET_CODE (to_rtx) == MEM)
3832 /* If the field is at offset zero, we could have been given the
3833 DECL_RTX of the parent struct. Don't munge it. */
3834 to_rtx = shallow_copy_rtx (to_rtx);
3836 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3839 /* Deal with volatile and readonly fields. The former is only done
3840 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3841 if (volatilep && GET_CODE (to_rtx) == MEM)
3843 if (to_rtx == orig_to_rtx)
3844 to_rtx = copy_rtx (to_rtx);
3845 MEM_VOLATILE_P (to_rtx) = 1;
3848 if (TREE_CODE (to) == COMPONENT_REF
3849 && TREE_READONLY (TREE_OPERAND (to, 1))
3850 /* We can't assert that a MEM won't be set more than once
3851 if the component is not addressable because another
3852 non-addressable component may be referenced by the same MEM. */
3853 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3855 if (to_rtx == orig_to_rtx)
3856 to_rtx = copy_rtx (to_rtx);
3857 RTX_UNCHANGING_P (to_rtx) = 1;
3860 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3862 if (to_rtx == orig_to_rtx)
3863 to_rtx = copy_rtx (to_rtx);
3864 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3867 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3869 /* Spurious cast for HPUX compiler. */
3870 ? ((enum machine_mode)
3871 TYPE_MODE (TREE_TYPE (to)))
3873 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3875 preserve_temp_slots (result);
3879 /* If the value is meaningful, convert RESULT to the proper mode.
3880 Otherwise, return nothing. */
3881 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3882 TYPE_MODE (TREE_TYPE (from)),
3884 TYPE_UNSIGNED (TREE_TYPE (to)))
3888 /* If the rhs is a function call and its value is not an aggregate,
3889 call the function before we start to compute the lhs.
3890 This is needed for correct code for cases such as
3891 val = setjmp (buf) on machines where reference to val
3892 requires loading up part of an address in a separate insn.
3894 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3895 since it might be a promoted variable where the zero- or sign- extension
3896 needs to be done. Handling this in the normal way is safe because no
3897 computation is done before the call. */
3898 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3899 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3900 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3901 && GET_CODE (DECL_RTL (to)) == REG))
3906 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3908 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3910 /* Handle calls that return values in multiple non-contiguous locations.
3911 The Irix 6 ABI has examples of this. */
3912 if (GET_CODE (to_rtx) == PARALLEL)
3913 emit_group_load (to_rtx, value, TREE_TYPE (from),
3914 int_size_in_bytes (TREE_TYPE (from)));
3915 else if (GET_MODE (to_rtx) == BLKmode)
3916 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3919 if (POINTER_TYPE_P (TREE_TYPE (to)))
3920 value = convert_memory_address (GET_MODE (to_rtx), value);
3921 emit_move_insn (to_rtx, value);
3923 preserve_temp_slots (to_rtx);
3926 return want_value ? to_rtx : NULL_RTX;
3929 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3930 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3933 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3935 /* Don't move directly into a return register. */
3936 if (TREE_CODE (to) == RESULT_DECL
3937 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3942 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3944 if (GET_CODE (to_rtx) == PARALLEL)
3945 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3946 int_size_in_bytes (TREE_TYPE (from)));
3948 emit_move_insn (to_rtx, temp);
3950 preserve_temp_slots (to_rtx);
3953 return want_value ? to_rtx : NULL_RTX;
3956 /* In case we are returning the contents of an object which overlaps
3957 the place the value is being stored, use a safe function when copying
3958 a value through a pointer into a structure value return block. */
3959 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3960 && current_function_returns_struct
3961 && !current_function_returns_pcc_struct)
3966 size = expr_size (from);
3967 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3969 if (TARGET_MEM_FUNCTIONS)
3970 emit_library_call (memmove_libfunc, LCT_NORMAL,
3971 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3972 XEXP (from_rtx, 0), Pmode,
3973 convert_to_mode (TYPE_MODE (sizetype),
3974 size, TYPE_UNSIGNED (sizetype)),
3975 TYPE_MODE (sizetype));
3977 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3978 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3979 XEXP (to_rtx, 0), Pmode,
3980 convert_to_mode (TYPE_MODE (integer_type_node),
3982 TYPE_UNSIGNED (integer_type_node)),
3983 TYPE_MODE (integer_type_node));
3985 preserve_temp_slots (to_rtx);
3988 return want_value ? to_rtx : NULL_RTX;
3991 /* Compute FROM and store the value in the rtx we got. */
3994 result = store_expr (from, to_rtx, want_value);
3995 preserve_temp_slots (result);
3998 return want_value ? result : NULL_RTX;
4001 /* Generate code for computing expression EXP,
4002 and storing the value into TARGET.
4003 TARGET may contain a QUEUED rtx.
4005 If WANT_VALUE & 1 is nonzero, return a copy of the value
4006 not in TARGET, so that we can be sure to use the proper
4007 value in a containing expression even if TARGET has something
4008 else stored in it. If possible, we copy the value through a pseudo
4009 and return that pseudo. Or, if the value is constant, we try to
4010 return the constant. In some cases, we return a pseudo
4011 copied *from* TARGET.
4013 If the mode is BLKmode then we may return TARGET itself.
4014 It turns out that in BLKmode it doesn't cause a problem,
4015 because C has no operators that could combine two different
4016 assignments into the same BLKmode object with different values
4017 with no sequence point. Will other languages need this to be more thorough?
4020 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4021 to catch quickly any cases where the caller uses the value
4022 and fails to set WANT_VALUE.
4024 If WANT_VALUE & 2 is set, this is a store into a call param on the
4025 stack, and block moves may need to be treated specially. */
4028 store_expr (tree exp, rtx target, int want_value)
4031 rtx alt_rtl = NULL_RTX;
4032 rtx mark = mark_queue ();
4033 int dont_return_target = 0;
4034 int dont_store_target = 0;
4036 if (VOID_TYPE_P (TREE_TYPE (exp)))
4038 /* C++ can generate ?: expressions with a throw expression in one
4039 branch and an rvalue in the other. Here, we resolve attempts to
4040 store the throw expression's nonexistent result. */
4043 expand_expr (exp, const0_rtx, VOIDmode, 0);
4046 if (TREE_CODE (exp) == COMPOUND_EXPR)
4048 /* Perform first part of compound expression, then assign from second part. */
4050 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4051 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4053 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4055 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4057 /* For conditional expression, get safe form of the target. Then
4058 test the condition, doing the appropriate assignment on either
4059 side. This avoids the creation of unnecessary temporaries.
4060 For non-BLKmode, it is more efficient not to do this. */
4062 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4065 target = protect_from_queue (target, 1);
4067 do_pending_stack_adjust ();
4069 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4070 start_cleanup_deferral ();
4071 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4072 end_cleanup_deferral ();
4074 emit_jump_insn (gen_jump (lab2));
4077 start_cleanup_deferral ();
4078 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4079 end_cleanup_deferral ();
4084 return want_value & 1 ? target : NULL_RTX;
4086 else if (queued_subexp_p (target))
4087 /* If target contains a postincrement, let's not risk
4088 using it as the place to generate the rhs. */
4090 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4092 /* Expand EXP into a new pseudo. */
4093 temp = gen_reg_rtx (GET_MODE (target));
4094 temp = expand_expr (exp, temp, GET_MODE (target),
4096 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4099 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4101 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4103 /* If target is volatile, ANSI requires accessing the value
4104 *from* the target, if it is accessed. So make that happen.
4105 In no case return the target itself. */
4106 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4107 dont_return_target = 1;
4109 else if ((want_value & 1) != 0
4110 && GET_CODE (target) == MEM
4111 && ! MEM_VOLATILE_P (target)
4112 && GET_MODE (target) != BLKmode)
4113 /* If target is in memory and caller wants value in a register instead,
4114 arrange that. Pass TARGET as target for expand_expr so that,
4115 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4116 We know expand_expr will not use the target in that case.
4117 Don't do this if TARGET is volatile because we are supposed
4118 to write it and then read it. */
4120 temp = expand_expr (exp, target, GET_MODE (target),
4121 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4122 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4124 /* If TEMP is already in the desired TARGET, only copy it from
4125 memory and don't store it there again. */
4127 || (rtx_equal_p (temp, target)
4128 && ! side_effects_p (temp) && ! side_effects_p (target)))
4129 dont_store_target = 1;
4130 temp = copy_to_reg (temp);
4132 dont_return_target = 1;
4134 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4135 /* If this is a scalar in a register that is stored in a wider mode
4136 than the declared mode, compute the result into its declared mode
4137 and then convert to the wider mode. Our value is the computed
4140 rtx inner_target = 0;
4142 /* If we don't want a value, we can do the conversion inside EXP,
4143 which will often result in some optimizations. Do the conversion
4144 in two steps: first change the signedness, if needed, then
4145 do the extension. But don't do this if the type of EXP is a subtype
4146 of something else since then the conversion might involve
4147 more than just converting modes. */
4148 if ((want_value & 1) == 0
4149 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4150 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4152 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4153 != SUBREG_PROMOTED_UNSIGNED_P (target))
4155 (lang_hooks.types.signed_or_unsigned_type
4156 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4158 exp = convert (lang_hooks.types.type_for_mode
4159 (GET_MODE (SUBREG_REG (target)),
4160 SUBREG_PROMOTED_UNSIGNED_P (target)),
4163 inner_target = SUBREG_REG (target);
4166 temp = expand_expr (exp, inner_target, VOIDmode,
4167 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4169 /* If TEMP is a MEM and we want a result value, make the access
4170 now so it gets done only once. Strictly speaking, this is
4171 only necessary if the MEM is volatile, or if the address
4172 overlaps TARGET. But not performing the load twice also
4173 reduces the amount of rtl we generate and then have to CSE. */
4174 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4175 temp = copy_to_reg (temp);
4177 /* If TEMP is a VOIDmode constant, use convert_modes to make
4178 sure that we properly convert it. */
4179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4181 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4182 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4183 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4184 GET_MODE (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4188 convert_move (SUBREG_REG (target), temp,
4189 SUBREG_PROMOTED_UNSIGNED_P (target));
4191 /* If we promoted a constant, change the mode back down to match
4192 target. Otherwise, the caller might get confused by a result whose
4193 mode is larger than expected. */
4195 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4197 if (GET_MODE (temp) != VOIDmode)
4199 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4200 SUBREG_PROMOTED_VAR_P (temp) = 1;
4201 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4202 SUBREG_PROMOTED_UNSIGNED_P (target));
4205 temp = convert_modes (GET_MODE (target),
4206 GET_MODE (SUBREG_REG (target)),
4207 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4210 return want_value & 1 ? temp : NULL_RTX;
4214 temp = expand_expr_real (exp, target, GET_MODE (target),
4216 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4218 /* Return TARGET if it's a specified hardware register.
4219 If TARGET is a volatile mem ref, either return TARGET
4220 or return a reg copied *from* TARGET; ANSI requires this.
4222 Otherwise, if TEMP is not TARGET, return TEMP
4223 if it is constant (for efficiency),
4224 or if we really want the correct value. */
4225 if (!(target && GET_CODE (target) == REG
4226 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4227 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4228 && ! rtx_equal_p (temp, target)
4229 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4230 dont_return_target = 1;
4233 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4234 the same as that of TARGET, adjust the constant. This is needed, for
4235 example, in case it is a CONST_DOUBLE and we want only a word-sized
4237 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4238 && TREE_CODE (exp) != ERROR_MARK
4239 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4240 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4241 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4243 /* If value was not generated in the target, store it there.
4244 Convert the value to TARGET's type first if necessary and emit the
4245 pending increments that have been queued when expanding EXP.
4246 Note that we cannot emit the whole queue blindly because this will
4247 effectively disable the POST_INC optimization later.
4249 If TEMP and TARGET compare equal according to rtx_equal_p, but
4250 one or both of them are volatile memory refs, we have to distinguish
4252 - expand_expr has used TARGET. In this case, we must not generate
4253 another copy. This can be detected by TARGET being equal according
4255 - expand_expr has not used TARGET - that means that the source just
4256 happens to have the same RTX form. Since temp will have been created
4257 by expand_expr, it will compare unequal according to == .
4258 We must generate a copy in this case, to reach the correct number
4259 of volatile memory references. */
4261 if ((! rtx_equal_p (temp, target)
4262 || (temp != target && (side_effects_p (temp)
4263 || side_effects_p (target))))
4264 && TREE_CODE (exp) != ERROR_MARK
4265 && ! dont_store_target
4266 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4267 but TARGET is not a valid memory reference, TEMP will differ
4268 from TARGET although it is really the same location. */
4269 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4270 /* If there's nothing to copy, don't bother. Don't call expr_size
4271 unless necessary, because some front ends' (C++) expr_size hook
4272 aborts on objects that are not supposed to be bit-copied or
4274 && expr_size (exp) != const0_rtx)
4276 emit_insns_enqueued_after_mark (mark);
4277 target = protect_from_queue (target, 1);
4278 temp = protect_from_queue (temp, 0);
4279 if (GET_MODE (temp) != GET_MODE (target)
4280 && GET_MODE (temp) != VOIDmode)
4282 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4283 if (dont_return_target)
4285 /* In this case, we will return TEMP,
4286 so make sure it has the proper mode.
4287 But don't forget to store the value into TARGET. */
4288 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4289 emit_move_insn (target, temp);
4292 convert_move (target, temp, unsignedp);
4295 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4297 /* Handle copying a string constant into an array. The string
4298 constant may be shorter than the array. So copy just the string's
4299 actual length, and clear the rest. First get the size of the data
4300 type of the string, which is actually the size of the target. */
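/* An illustrative case: given
     char buf[8] = "abc";
   the STRING_CST has the array's type, so expr_size is 8 while
   TREE_STRING_LENGTH is 4 ("abc" plus the terminating nul); 4 bytes
   are block-copied and the trailing 4 bytes are cleared.  */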
4301 rtx size = expr_size (exp);
4303 if (GET_CODE (size) == CONST_INT
4304 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4305 emit_block_move (target, temp, size,
4307 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4310 /* Compute the size of the data to copy from the string. */
4312 = size_binop (MIN_EXPR,
4313 make_tree (sizetype, size),
4314 size_int (TREE_STRING_LENGTH (exp)));
4316 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4318 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4321 /* Copy that much. */
4322 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4323 TYPE_UNSIGNED (sizetype));
4324 emit_block_move (target, temp, copy_size_rtx,
4326 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4328 /* Figure out how much is left in TARGET that we have to clear.
4329 Do all calculations in ptr_mode. */
4330 if (GET_CODE (copy_size_rtx) == CONST_INT)
4332 size = plus_constant (size, -INTVAL (copy_size_rtx));
4333 target = adjust_address (target, BLKmode,
4334 INTVAL (copy_size_rtx));
4338 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4339 copy_size_rtx, NULL_RTX, 0,
4342 #ifdef POINTERS_EXTEND_UNSIGNED
4343 if (GET_MODE (copy_size_rtx) != Pmode)
4344 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4345 TYPE_UNSIGNED (sizetype));
4348 target = offset_address (target, copy_size_rtx,
4349 highest_pow2_factor (copy_size));
4350 label = gen_label_rtx ();
4351 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4352 GET_MODE (size), 0, label);
4355 if (size != const0_rtx)
4356 clear_storage (target, size);
4362 /* Handle calls that return values in multiple non-contiguous locations.
4363 The Irix 6 ABI has examples of this. */
4364 else if (GET_CODE (target) == PARALLEL)
4365 emit_group_load (target, temp, TREE_TYPE (exp),
4366 int_size_in_bytes (TREE_TYPE (exp)));
4367 else if (GET_MODE (temp) == BLKmode)
4368 emit_block_move (target, temp, expr_size (exp),
4370 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4373 temp = force_operand (temp, target);
4375 emit_move_insn (target, temp);
4379 /* If we don't want a value, return NULL_RTX. */
4380 if ((want_value & 1) == 0)
4383 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4384 ??? The latter test doesn't seem to make sense. */
4385 else if (dont_return_target && GET_CODE (temp) != MEM)
4388 /* Return TARGET itself if it is a hard register. */
4389 else if ((want_value & 1) != 0
4390 && GET_MODE (target) != BLKmode
4391 && ! (GET_CODE (target) == REG
4392 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4393 return copy_to_reg (target);
4399 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4402 is_zeros_p (tree exp)
4406 switch (TREE_CODE (exp))
4410 case NON_LVALUE_EXPR:
4411 case VIEW_CONVERT_EXPR:
4412 return is_zeros_p (TREE_OPERAND (exp, 0));
4415 return integer_zerop (exp);
4419 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4422 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4425 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4426 elt = TREE_CHAIN (elt))
4427 if (!is_zeros_p (TREE_VALUE (elt)))
4433 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4434 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4435 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4436 if (! is_zeros_p (TREE_VALUE (elt)))
4446 /* Return 1 if EXP consists mostly (at least 3/4) of zeros. */
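/* For example, a CONSTRUCTOR with 8 elements of which 6 are zero
   satisfies 4 * 6 >= 3 * 8 and so counts as mostly zeros.  */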
4449 mostly_zeros_p (tree exp)
4451 if (TREE_CODE (exp) == CONSTRUCTOR)
4453 int elts = 0, zeros = 0;
4454 tree elt = CONSTRUCTOR_ELTS (exp);
4455 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4457 /* If there are no ranges of true bits, it is all zero. */
4458 return elt == NULL_TREE;
4460 for (; elt; elt = TREE_CHAIN (elt))
4462 /* We do not handle the case where the index is a RANGE_EXPR,
4463 so the statistic will be somewhat inaccurate.
4464 We do make a more accurate count in store_constructor itself,
4465 and since this function is only used for nested array elements,
4466 this should be close enough. */
4467 if (mostly_zeros_p (TREE_VALUE (elt)))
4472 return 4 * zeros >= 3 * elts;
4475 return is_zeros_p (exp);
4478 /* Helper function for store_constructor.
4479 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4480 TYPE is the type of the CONSTRUCTOR, not the element type.
4481 CLEARED is as for store_constructor.
4482 ALIAS_SET is the alias set to use for any stores.
4484 This provides a recursive shortcut back to store_constructor when it isn't
4485 necessary to go through store_field. This is so that we can pass through
4486 the cleared field to let store_constructor know that we may not have to
4487 clear a substructure if the outer structure has already been cleared. */
4490 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4491 HOST_WIDE_INT bitpos, enum machine_mode mode,
4492 tree exp, tree type, int cleared, int alias_set)
4494 if (TREE_CODE (exp) == CONSTRUCTOR
4495 /* We can only call store_constructor recursively if the size and
4496 bit position are on a byte boundary. */
4497 && bitpos % BITS_PER_UNIT == 0
4498 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4499 /* If we have a nonzero bitpos for a register target, then we just
4500 let store_field do the bitfield handling. This is unlikely to
4501 generate unnecessary clear instructions anyway. */
4502 && (bitpos == 0 || GET_CODE (target) == MEM))
4504 if (GET_CODE (target) == MEM)
4506 = adjust_address (target,
4507 GET_MODE (target) == BLKmode
4509 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4510 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4513 /* Update the alias set, if required. */
4514 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4515 && MEM_ALIAS_SET (target) != 0)
4517 target = copy_rtx (target);
4518 set_mem_alias_set (target, alias_set);
4521 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4524 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4528 /* Store the value of constructor EXP into the rtx TARGET.
4529 TARGET is either a REG or a MEM; we know it cannot conflict, since
4530 safe_from_p has been called.
4531 CLEARED is true if TARGET is known to have been zeroed.
4532 SIZE is the number of bytes of TARGET we are allowed to modify: this
4533 may not be the same as the size of EXP if we are assigning to a field
4534 which has been packed to exclude padding bits. */
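/* An illustrative case: for
     struct { int a, b, c; } x = { 1 };
   the constructor has fewer elements than the structure, so X is
   cleared as a whole first and only field A is stored explicitly.  */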
4537 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4539 tree type = TREE_TYPE (exp);
4540 #ifdef WORD_REGISTER_OPERATIONS
4541 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4544 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4545 || TREE_CODE (type) == QUAL_UNION_TYPE)
4549 /* If size is zero or the target is already cleared, do nothing. */
4550 if (size == 0 || cleared)
4552 /* We either clear the aggregate or indicate the value is dead. */
4553 else if ((TREE_CODE (type) == UNION_TYPE
4554 || TREE_CODE (type) == QUAL_UNION_TYPE)
4555 && ! CONSTRUCTOR_ELTS (exp))
4556 /* If the constructor is empty, clear the union. */
4558 clear_storage (target, expr_size (exp));
4562 /* If we are building a static constructor into a register,
4563 set the initial value as zero so we can fold the value into
4564 a constant. But if more than one register is involved,
4565 this probably loses. */
4566 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4567 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4569 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4573 /* If the constructor has fewer fields than the structure
4574 or if we are initializing the structure to mostly zeros,
4575 clear the whole structure first. Don't do this if TARGET is a
4576 register whose mode size isn't equal to SIZE since clear_storage
4577 can't handle this case. */
4578 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4579 || mostly_zeros_p (exp))
4580 && (GET_CODE (target) != REG
4581 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4584 rtx xtarget = target;
4586 if (readonly_fields_p (type))
4588 xtarget = copy_rtx (xtarget);
4589 RTX_UNCHANGING_P (xtarget) = 1;
4592 clear_storage (xtarget, GEN_INT (size));
4597 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4599 /* Store each element of the constructor into
4600 the corresponding field of TARGET. */
4602 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4604 tree field = TREE_PURPOSE (elt);
4605 tree value = TREE_VALUE (elt);
4606 enum machine_mode mode;
4607 HOST_WIDE_INT bitsize;
4608 HOST_WIDE_INT bitpos = 0;
4610 rtx to_rtx = target;
4612 /* Just ignore missing fields.
4613 We cleared the whole structure, above,
4614 if any fields are missing. */
4618 if (cleared && is_zeros_p (value))
4621 if (host_integerp (DECL_SIZE (field), 1))
4622 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4626 mode = DECL_MODE (field);
4627 if (DECL_BIT_FIELD (field))
4630 offset = DECL_FIELD_OFFSET (field);
4631 if (host_integerp (offset, 0)
4632 && host_integerp (bit_position (field), 0))
4634 bitpos = int_bit_position (field);
4638 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4645 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4646 make_tree (TREE_TYPE (exp),
4649 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4650 if (GET_CODE (to_rtx) != MEM)
4653 #ifdef POINTERS_EXTEND_UNSIGNED
4654 if (GET_MODE (offset_rtx) != Pmode)
4655 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4657 if (GET_MODE (offset_rtx) != ptr_mode)
4658 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4661 to_rtx = offset_address (to_rtx, offset_rtx,
4662 highest_pow2_factor (offset));
4665 if (TREE_READONLY (field))
4667 if (GET_CODE (to_rtx) == MEM)
4668 to_rtx = copy_rtx (to_rtx);
4670 RTX_UNCHANGING_P (to_rtx) = 1;
4673 #ifdef WORD_REGISTER_OPERATIONS
4674 /* If this initializes a field that is smaller than a word, at the
4675 start of a word, try to widen it to a full word.
4676 This special case allows us to output C++ member function
4677 initializations in a form that the optimizers can understand. */
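/* A hypothetical instance, assuming BITS_PER_WORD == 32: storing the
   constant 5 into an 8-bit field at bit 0 of a word becomes a
   full-word store of 5 (or of 5 << 24 if BYTES_BIG_ENDIAN), which the
   optimizers understand better than a bit-field insertion.  */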
4678 if (GET_CODE (target) == REG
4679 && bitsize < BITS_PER_WORD
4680 && bitpos % BITS_PER_WORD == 0
4681 && GET_MODE_CLASS (mode) == MODE_INT
4682 && TREE_CODE (value) == INTEGER_CST
4684 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4686 tree type = TREE_TYPE (value);
4688 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4690 type = lang_hooks.types.type_for_size
4691 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4692 value = convert (type, value);
4695 if (BYTES_BIG_ENDIAN)
4697 = fold (build (LSHIFT_EXPR, type, value,
4698 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4699 bitsize = BITS_PER_WORD;
4704 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4705 && DECL_NONADDRESSABLE_P (field))
4707 to_rtx = copy_rtx (to_rtx);
4708 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4711 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4712 value, type, cleared,
4713 get_alias_set (TREE_TYPE (field)));
4716 else if (TREE_CODE (type) == ARRAY_TYPE
4717 || TREE_CODE (type) == VECTOR_TYPE)
4723 tree elttype = TREE_TYPE (type);
4725 HOST_WIDE_INT minelt = 0;
4726 HOST_WIDE_INT maxelt = 0;
4730 unsigned n_elts = 0;
4732 if (TREE_CODE (type) == ARRAY_TYPE)
4733 domain = TYPE_DOMAIN (type);
4735 /* Vectors do not have domains; look up the domain of
4736 the array embedded in the debug representation type.
4737 FIXME Would probably be more efficient to treat vectors
4738 separately from arrays. */
4740 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4741 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4742 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4744 enum machine_mode mode = GET_MODE (target);
4746 icode = (int) vec_init_optab->handlers[mode].insn_code;
4747 if (icode != CODE_FOR_nothing)
4751 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4752 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4753 vector = alloca (n_elts);
4754 for (i = 0; i < n_elts; i++)
4755 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4760 const_bounds_p = (TYPE_MIN_VALUE (domain)
4761 && TYPE_MAX_VALUE (domain)
4762 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4763 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4765 /* If we have constant bounds for the range of the type, get them. */
4768 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4769 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4772 /* If the constructor has fewer elements than the array,
4773 clear the whole array first. Similarly if this is a
4774 static constructor of a non-BLKmode object. */
4775 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4779 HOST_WIDE_INT count = 0, zero_count = 0;
4780 need_to_clear = ! const_bounds_p;
4782 /* This loop is a more accurate version of the loop in
4783 mostly_zeros_p (it handles RANGE_EXPR in an index).
4784 It is also needed to check for missing elements. */
4785 for (elt = CONSTRUCTOR_ELTS (exp);
4786 elt != NULL_TREE && ! need_to_clear;
4787 elt = TREE_CHAIN (elt))
4789 tree index = TREE_PURPOSE (elt);
4790 HOST_WIDE_INT this_node_count;
4792 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4794 tree lo_index = TREE_OPERAND (index, 0);
4795 tree hi_index = TREE_OPERAND (index, 1);
4797 if (! host_integerp (lo_index, 1)
4798 || ! host_integerp (hi_index, 1))
4804 this_node_count = (tree_low_cst (hi_index, 1)
4805 - tree_low_cst (lo_index, 1) + 1);
4808 this_node_count = 1;
4810 count += this_node_count;
4811 if (mostly_zeros_p (TREE_VALUE (elt)))
4812 zero_count += this_node_count;
4815 /* Clear the entire array first if there are any missing elements,
4816 or if the incidence of zero elements is >= 75%. */
4818 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4822 if (need_to_clear && size > 0 && !vector)
4827 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4829 clear_storage (target, GEN_INT (size));
4833 else if (REG_P (target))
4834 /* Inform later passes that the old value is dead. */
4835 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4837 /* Store each element of the constructor into
4838 the corresponding element of TARGET, determined
4839 by counting the elements. */
4840 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4842 elt = TREE_CHAIN (elt), i++)
4844 enum machine_mode mode;
4845 HOST_WIDE_INT bitsize;
4846 HOST_WIDE_INT bitpos;
4848 tree value = TREE_VALUE (elt);
4849 tree index = TREE_PURPOSE (elt);
4850 rtx xtarget = target;
4852 if (cleared && is_zeros_p (value))
4855 unsignedp = TYPE_UNSIGNED (elttype);
4856 mode = TYPE_MODE (elttype);
4857 if (mode == BLKmode)
4858 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4859 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4862 bitsize = GET_MODE_BITSIZE (mode);
4864 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4866 tree lo_index = TREE_OPERAND (index, 0);
4867 tree hi_index = TREE_OPERAND (index, 1);
4868 rtx index_r, pos_rtx, loop_end;
4869 struct nesting *loop;
4870 HOST_WIDE_INT lo, hi, count;
4876 /* If the range is constant and "small", unroll the loop. */
4878 && host_integerp (lo_index, 0)
4879 && host_integerp (hi_index, 0)
4880 && (lo = tree_low_cst (lo_index, 0),
4881 hi = tree_low_cst (hi_index, 0),
4882 count = hi - lo + 1,
4883 (GET_CODE (target) != MEM
4885 || (host_integerp (TYPE_SIZE (elttype), 1)
4886 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4889 lo -= minelt; hi -= minelt;
4890 for (; lo <= hi; lo++)
4892 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4894 if (GET_CODE (target) == MEM
4895 && !MEM_KEEP_ALIAS_SET_P (target)
4896 && TREE_CODE (type) == ARRAY_TYPE
4897 && TYPE_NONALIASED_COMPONENT (type))
4899 target = copy_rtx (target);
4900 MEM_KEEP_ALIAS_SET_P (target) = 1;
4903 store_constructor_field
4904 (target, bitsize, bitpos, mode, value, type, cleared,
4905 get_alias_set (elttype));
4910 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4911 loop_end = gen_label_rtx ();
4913 unsignedp = TYPE_UNSIGNED (domain);
4915 index = build_decl (VAR_DECL, NULL_TREE, domain);
4918 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4920 SET_DECL_RTL (index, index_r);
4921 if (TREE_CODE (value) == SAVE_EXPR
4922 && SAVE_EXPR_RTL (value) == 0)
4924 /* Make sure value gets expanded once before the
4926 expand_expr (value, const0_rtx, VOIDmode, 0);
4929 store_expr (lo_index, index_r, 0);
4930 loop = expand_start_loop (0);
4932 /* Assign value to element index. */
4934 = convert (ssizetype,
4935 fold (build (MINUS_EXPR, TREE_TYPE (index),
4936 index, TYPE_MIN_VALUE (domain))));
4937 position = size_binop (MULT_EXPR, position,
4939 TYPE_SIZE_UNIT (elttype)));
4941 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4942 xtarget = offset_address (target, pos_rtx,
4943 highest_pow2_factor (position));
4944 xtarget = adjust_address (xtarget, mode, 0);
4945 if (TREE_CODE (value) == CONSTRUCTOR)
4946 store_constructor (value, xtarget, cleared,
4947 bitsize / BITS_PER_UNIT);
4949 store_expr (value, xtarget, 0);
4951 expand_exit_loop_if_false (loop,
4952 build (LT_EXPR, integer_type_node,
4955 expand_increment (build (PREINCREMENT_EXPR,
4957 index, integer_one_node), 0, 0);
4959 emit_label (loop_end);
4962 else if ((index != 0 && ! host_integerp (index, 0))
4963 || ! host_integerp (TYPE_SIZE (elttype), 1))
4971 index = ssize_int (1);
4974 index = convert (ssizetype,
4975 fold (build (MINUS_EXPR, index,
4976 TYPE_MIN_VALUE (domain))));
4978 position = size_binop (MULT_EXPR, index,
4980 TYPE_SIZE_UNIT (elttype)));
4981 xtarget = offset_address (target,
4982 expand_expr (position, 0, VOIDmode, 0),
4983 highest_pow2_factor (position));
4984 xtarget = adjust_address (xtarget, mode, 0);
4985 store_expr (value, xtarget, 0);
4992 pos = tree_low_cst (index, 0) - minelt;
4995 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5000 bitpos = ((tree_low_cst (index, 0) - minelt)
5001 * tree_low_cst (TYPE_SIZE (elttype), 1));
5003 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5005 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5006 && TREE_CODE (type) == ARRAY_TYPE
5007 && TYPE_NONALIASED_COMPONENT (type))
5009 target = copy_rtx (target);
5010 MEM_KEEP_ALIAS_SET_P (target) = 1;
5012 store_constructor_field (target, bitsize, bitpos, mode, value,
5013 type, cleared, get_alias_set (elttype));
5018 emit_insn (GEN_FCN (icode) (target,
5019 gen_rtx_PARALLEL (GET_MODE (target),
5020 gen_rtvec_v (n_elts, vector))));
5024 /* Assignments from a SET_TYPE constructor. */
5025 else if (TREE_CODE (type) == SET_TYPE)
5027 tree elt = CONSTRUCTOR_ELTS (exp);
5028 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5029 tree domain = TYPE_DOMAIN (type);
5030 tree domain_min, domain_max, bitlength;
5032 /* The default implementation strategy is to extract the constant
5033 parts of the constructor, use that to initialize the target,
5034 and then "or" in whatever non-constant ranges we need in addition.
5036 If a large set is all zero or all ones, it is
5037 probably better to set it using memset (if available) or bzero.
5038 Also, if a large set has just a single range, it may be
5039 better to first clear the whole set (using bzero/memset)
5040 and then set the bits we want. */
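/* An illustrative case, assuming a 32-bit word: a 256-bit in-memory
   set built from the single constant range [8..15] is cleared as a
   whole and then has one byte set via memset, rather than being
   assembled bit by bit.  */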
5042 /* Check for all zeros. */
5043 if (elt == NULL_TREE && size > 0)
5046 clear_storage (target, GEN_INT (size));
5050 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5051 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5052 bitlength = size_binop (PLUS_EXPR,
5053 size_diffop (domain_max, domain_min),
5056 nbits = tree_low_cst (bitlength, 1);
5058 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5059 are "complicated" (more than one range), initialize (the
5060 constant parts) by copying from a constant. */
5061 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5062 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5064 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5065 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5066 char *bit_buffer = alloca (nbits);
5067 HOST_WIDE_INT word = 0;
5068 unsigned int bit_pos = 0;
5069 unsigned int ibit = 0;
5070 unsigned int offset = 0; /* In bytes from beginning of set. */
5072 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5075 if (bit_buffer[ibit])
5077 if (BYTES_BIG_ENDIAN)
5078 word |= (1 << (set_word_size - 1 - bit_pos));
5080 word |= 1 << bit_pos;
5084 if (bit_pos >= set_word_size || ibit == nbits)
5086 if (word != 0 || ! cleared)
5088 rtx datum = gen_int_mode (word, mode);
5091 /* The assumption here is that it is safe to use
5092 XEXP if the set is multi-word, but not if
5093 it's single-word. */
5094 if (GET_CODE (target) == MEM)
5095 to_rtx = adjust_address (target, mode, offset);
5096 else if (offset == 0)
5100 emit_move_insn (to_rtx, datum);
5107 offset += set_word_size / BITS_PER_UNIT;
5112 /* Don't bother clearing storage if the set is all ones. */
5113 if (TREE_CHAIN (elt) != NULL_TREE
5114 || (TREE_PURPOSE (elt) == NULL_TREE
5116 : ( ! host_integerp (TREE_VALUE (elt), 0)
5117 || ! host_integerp (TREE_PURPOSE (elt), 0)
5118 || (tree_low_cst (TREE_VALUE (elt), 0)
5119 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5120 != (HOST_WIDE_INT) nbits))))
5121 clear_storage (target, expr_size (exp));
5123 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5125 /* Start of range of element or NULL. */
5126 tree startbit = TREE_PURPOSE (elt);
5127 /* End of range of element, or element value. */
5128 tree endbit = TREE_VALUE (elt);
5129 HOST_WIDE_INT startb, endb;
5130 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5132 bitlength_rtx = expand_expr (bitlength,
5133 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5135 /* Handle a non-range tuple element like [ expr ]. */
5136 if (startbit == NULL_TREE)
5138 startbit = save_expr (endbit);
5142 startbit = convert (sizetype, startbit);
5143 endbit = convert (sizetype, endbit);
5144 if (! integer_zerop (domain_min))
5146 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5147 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5149 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5150 EXPAND_CONST_ADDRESS);
5151 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5152 EXPAND_CONST_ADDRESS);
5158 ((build_qualified_type (lang_hooks.types.type_for_mode
5159 (GET_MODE (target), 0),
5162 emit_move_insn (targetx, target);
5165 else if (GET_CODE (target) == MEM)
5170 /* Optimization: If startbit and endbit are constants divisible
5171 by BITS_PER_UNIT, call memset instead. */
5172 if (TARGET_MEM_FUNCTIONS
5173 && TREE_CODE (startbit) == INTEGER_CST
5174 && TREE_CODE (endbit) == INTEGER_CST
5175 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5176 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5178 emit_library_call (memset_libfunc, LCT_NORMAL,
5180 plus_constant (XEXP (targetx, 0),
5181 startb / BITS_PER_UNIT),
5183 constm1_rtx, TYPE_MODE (integer_type_node),
5184 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5185 TYPE_MODE (sizetype));
5188 emit_library_call (setbits_libfunc, LCT_NORMAL,
5189 VOIDmode, 4, XEXP (targetx, 0),
5190 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5191 startbit_rtx, TYPE_MODE (sizetype),
5192 endbit_rtx, TYPE_MODE (sizetype));
5195 emit_move_insn (target, targetx);
5203 /* Store the value of EXP (an expression tree)
5204 into a subfield of TARGET which has mode MODE and occupies
5205 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5206 If MODE is VOIDmode, it means that we are storing into a bit-field.
5208 If VALUE_MODE is VOIDmode, return nothing in particular.
5209 UNSIGNEDP is not used in this case.
5211 Otherwise, return an rtx for the value stored. This rtx
5212 has mode VALUE_MODE if that is convenient to do.
5213 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5215 TYPE is the type of the underlying object,
5217 ALIAS_SET is the alias set for the destination. This value will
5218 (in general) be different from that for TARGET, since TARGET is a
5219 reference to the containing structure. */
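/* An illustrative call: given
     struct s { int a : 3; int b : 5; } x;
   a store into x.b reaches here with BITSIZE 5, BITPOS 3 (under one
   possible little-endian layout; layouts vary by target) and MODE
   VOIDmode, forcing the bit-field path below.  */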
5222 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5223 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5224 int unsignedp, tree type, int alias_set)
5226 HOST_WIDE_INT width_mask = 0;
5228 if (TREE_CODE (exp) == ERROR_MARK)
5231 /* If we have nothing to store, do nothing unless the expression has
5234 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5235 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5236 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5238 /* If we are storing into an unaligned field of an aligned union that is
5239 in a register, we may have the mode of TARGET being an integer mode but
5240 MODE == BLKmode. In that case, get an aligned object whose size and
5241 alignment are the same as TARGET and store TARGET into it (we can avoid
5242 the store if the field being stored is the entire width of TARGET). Then
5243 call ourselves recursively to store the field into a BLKmode version of
5244 that object. Finally, load from the object into TARGET. This is not
5245 very efficient in general, but should only be slightly more expensive
5246 than the otherwise-required unaligned accesses. Perhaps this can be
5247 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5248 twice, once with emit_move_insn and once via store_field. */
5251 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5253 rtx object = assign_temp (type, 0, 1, 1);
5254 rtx blk_object = adjust_address (object, BLKmode, 0);
5256 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5257 emit_move_insn (object, target);
5259 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5262 emit_move_insn (target, object);
5264 /* We want to return the BLKmode version of the data. */
5268 if (GET_CODE (target) == CONCAT)
5270 /* We're storing into a struct containing a single __complex. */
5274 return store_expr (exp, target, 0);
5277 /* If the structure is in a register or if the component
5278 is a bit field, we cannot use addressing to access it.
5279 Use bit-field techniques or SUBREG to store in it. */
5281 if (mode == VOIDmode
5282 || (mode != BLKmode && ! direct_store[(int) mode]
5283 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5284 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5285 || GET_CODE (target) == REG
5286 || GET_CODE (target) == SUBREG
5287 /* If the field isn't aligned enough to store as an ordinary memref,
5288 store it as a bit field. */
5290 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5291 || bitpos % GET_MODE_ALIGNMENT (mode))
5292 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5293 || (bitpos % BITS_PER_UNIT != 0)))
5294 /* If the RHS and the field have constant size and the size of the
5295 RHS isn't the same as that of the bitfield, we must use bitfield
5298 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5299 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5301 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5303 /* If BITSIZE is narrower than the size of the type of EXP
5304 we will be narrowing TEMP. Normally, what's wanted are the
5305 low-order bits. However, if EXP's type is a record and this is
5306 a big-endian machine, we want the upper BITSIZE bits. */
5307 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5308 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5309 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5310 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5311 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5315 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5317 if (mode != VOIDmode && mode != BLKmode
5318 && mode != TYPE_MODE (TREE_TYPE (exp)))
5319 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5321 /* If the modes of TARGET and TEMP are both BLKmode, both
5322 must be in memory and BITPOS must be aligned on a byte
5323 boundary. If so, we simply do a block copy. */
5324 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5326 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5327 || bitpos % BITS_PER_UNIT != 0)
5330 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5331 emit_block_move (target, temp,
5332 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5336 return value_mode == VOIDmode ? const0_rtx : target;
5339 /* Store the value in the bitfield. */
5340 store_bit_field (target, bitsize, bitpos, mode, temp,
5341 int_size_in_bytes (type));
5343 if (value_mode != VOIDmode)
5345 /* The caller wants an rtx for the value.
5346 If possible, avoid refetching from the bitfield itself. */
5348 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5351 enum machine_mode tmode;
5353 tmode = GET_MODE (temp);
5354 if (tmode == VOIDmode)
5358 return expand_and (tmode, temp,
5359 gen_int_mode (width_mask, tmode),
5362 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5363 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5364 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
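/* (The shift pair above sign-extends the field: e.g. a 5-bit
   field held in a 32-bit TMODE is shifted left and then
   arithmetic-shifted right by 27 bits.)  */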
5367 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5368 NULL_RTX, value_mode, VOIDmode,
5369 int_size_in_bytes (type));
5375 rtx addr = XEXP (target, 0);
5376 rtx to_rtx = target;
5378 /* If a value is wanted, it must be the lhs;
5379 so make the address stable for multiple use. */
5381 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5382 && ! CONSTANT_ADDRESS_P (addr)
5383 /* A frame-pointer reference is already stable. */
5384 && ! (GET_CODE (addr) == PLUS
5385 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5386 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5387 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5388 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5390 /* Now build a reference to just the desired component. */
5392 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5394 if (to_rtx == target)
5395 to_rtx = copy_rtx (to_rtx);
5397 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5398 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5399 set_mem_alias_set (to_rtx, alias_set);
5401 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5405 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5406 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5407 codes and find the ultimate containing object, which we return.
5409 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5410 bit position, and *PUNSIGNEDP to the signedness of the field.
5411 If the position of the field is variable, we store a tree
5412 giving the variable offset (in units) in *POFFSET.
5413 This offset is in addition to the bit position.
5414 If the position is not variable, we store 0 in *POFFSET.
5416 If any of the extraction expressions is volatile,
5417 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5419 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5420 is a mode that can be used to access the field. In that case, *PBITSIZE
5423 If the field describes a variable-sized object, *PMODE is set to
5424 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5425 this case, but the address of the object can be found. */
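/* An illustrative decomposition: for a COMPONENT_REF s.f, where F is
   a 5-bit bit-field placed 3 bits into S, this returns the tree for S
   and sets *PBITSIZE = 5, *PBITPOS = 3, *POFFSET = 0 and
   *PMODE = VOIDmode.  */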
5428 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5429 HOST_WIDE_INT *pbitpos, tree *poffset,
5430 enum machine_mode *pmode, int *punsignedp,
5434 enum machine_mode mode = VOIDmode;
5435 tree offset = size_zero_node;
5436 tree bit_offset = bitsize_zero_node;
5439 /* First get the mode, signedness, and size. We do this from just the
5440 outermost expression. */
5441 if (TREE_CODE (exp) == COMPONENT_REF)
5443 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5444 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5445 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5447 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5449 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5451 size_tree = TREE_OPERAND (exp, 1);
5452 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5456 mode = TYPE_MODE (TREE_TYPE (exp));
5457 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5459 if (mode == BLKmode)
5460 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5462 *pbitsize = GET_MODE_BITSIZE (mode);
5467 if (! host_integerp (size_tree, 1))
5468 mode = BLKmode, *pbitsize = -1;
5470 *pbitsize = tree_low_cst (size_tree, 1);
5473 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5474 and find the ultimate containing object. */
5477 if (TREE_CODE (exp) == BIT_FIELD_REF)
5478 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5479 else if (TREE_CODE (exp) == COMPONENT_REF)
5481 tree field = TREE_OPERAND (exp, 1);
5482 tree this_offset = DECL_FIELD_OFFSET (field);
5484 /* If this field hasn't been filled in yet, don't go
5485 past it. This should only happen when folding expressions
5486 made during type construction. */
5487 if (this_offset == 0)
5490 this_offset = SUBSTITUTE_PLACEHOLDER_IN_EXPR (this_offset, exp);
5492 offset = size_binop (PLUS_EXPR, offset, this_offset);
5493 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5494 DECL_FIELD_BIT_OFFSET (field));
5496 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5499 else if (TREE_CODE (exp) == ARRAY_REF
5500 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5502 tree index = TREE_OPERAND (exp, 1);
5503 tree array = TREE_OPERAND (exp, 0);
5504 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5505 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5506 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5508 /* We assume all arrays have sizes that are a multiple of a byte.
5509 First subtract the lower bound, if any, in the type of the
5510 index, then convert to sizetype and multiply by the size of the
5512 if (low_bound != 0 && ! integer_zerop (low_bound))
5513 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5516 /* If the index has a self-referential type, instantiate it with
5517 the object; likewise for the component size. */
5518 index = SUBSTITUTE_PLACEHOLDER_IN_EXPR (index, exp);
5519 unit_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (unit_size, array);
5520 offset = size_binop (PLUS_EXPR, offset,
5521 size_binop (MULT_EXPR,
5522 convert (sizetype, index),
5526 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5527 conversions that don't change the mode, and all view conversions
5528 except those that need to "step up" the alignment. */
5529 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5530 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5531 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5532 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5534 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5535 < BIGGEST_ALIGNMENT)
5536 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5537 || TYPE_ALIGN_OK (TREE_TYPE
5538 (TREE_OPERAND (exp, 0))))))
5539 && ! ((TREE_CODE (exp) == NOP_EXPR
5540 || TREE_CODE (exp) == CONVERT_EXPR)
5541 && (TYPE_MODE (TREE_TYPE (exp))
5542 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5545 /* If any reference in the chain is volatile, the effect is volatile. */
5546 if (TREE_THIS_VOLATILE (exp))
5549 exp = TREE_OPERAND (exp, 0);
5552 /* If OFFSET is constant, see if we can return the whole thing as a
5553 constant bit position. Otherwise, split it up. */
5554 if (host_integerp (offset, 0)
5555 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5557 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5558 && host_integerp (tem, 0))
5559 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5561 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5567 /* Return 1 if T is an expression that get_inner_reference handles. */
5570 handled_component_p (tree t)
5572 switch (TREE_CODE (t))
5577 case ARRAY_RANGE_REF:
5578 case NON_LVALUE_EXPR:
5579 case VIEW_CONVERT_EXPR:
5582 /* ??? Sure they are handled, but get_inner_reference may return
5583 a different PBITSIZE, depending upon whether the expression is
5584 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5587 return (TYPE_MODE (TREE_TYPE (t))
5588 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5595 /* Given an rtx VALUE that may contain additions and multiplications, return
5596 an equivalent value that just refers to a register, memory, or constant.
5597 This is done by generating instructions to perform the arithmetic and
5598 returning a pseudo-register containing the value.
5600 The returned value may be a REG, SUBREG, MEM or constant. */
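/* A minimal illustration: given (plus:SI (reg:SI 60) (const_int 4)),
   force_operand emits the addition and returns a pseudo register
   holding the sum.  */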
5603 force_operand (rtx value, rtx target)
5606 /* Use subtarget as the target for operand 0 of a binary operation. */
5607 rtx subtarget = get_subtarget (target);
5608 enum rtx_code code = GET_CODE (value);
5610 /* Check for a subreg applied to an expression produced by the loop optimizer. */
5612 && GET_CODE (SUBREG_REG (value)) != REG
5613 && GET_CODE (SUBREG_REG (value)) != MEM)
5615 value = simplify_gen_subreg (GET_MODE (value),
5616 force_reg (GET_MODE (SUBREG_REG (value)),
5617 force_operand (SUBREG_REG (value),
5619 GET_MODE (SUBREG_REG (value)),
5620 SUBREG_BYTE (value));
5621 code = GET_CODE (value);
5624 /* Check for a PIC address load. */
5625 if ((code == PLUS || code == MINUS)
5626 && XEXP (value, 0) == pic_offset_table_rtx
5627 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5628 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5629 || GET_CODE (XEXP (value, 1)) == CONST))
5632 subtarget = gen_reg_rtx (GET_MODE (value));
5633 emit_move_insn (subtarget, value);
5637 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5640 target = gen_reg_rtx (GET_MODE (value));
5641 convert_move (target, force_operand (XEXP (value, 0), NULL),
5642 code == ZERO_EXTEND);
5646 if (ARITHMETIC_P (value))
5648 op2 = XEXP (value, 1);
5649 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5651 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5654 op2 = negate_rtx (GET_MODE (value), op2);
5657 /* Check for an addition with OP2 a constant integer and our first
5658 operand a PLUS of a virtual register and something else. In that
5659 case, we want to emit the sum of the virtual register and the
5660 constant first and then add the other value. This allows virtual
5661 register instantiation to simply modify the constant rather than
5662 creating another one around this addition. */
5663 if (code == PLUS && GET_CODE (op2) == CONST_INT
5664 && GET_CODE (XEXP (value, 0)) == PLUS
5665 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5666 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5667 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5669 rtx temp = expand_simple_binop (GET_MODE (value), code,
5670 XEXP (XEXP (value, 0), 0), op2,
5671 subtarget, 0, OPTAB_LIB_WIDEN);
5672 return expand_simple_binop (GET_MODE (value), code, temp,
5673 force_operand (XEXP (XEXP (value,
5675 target, 0, OPTAB_LIB_WIDEN);
5678 op1 = force_operand (XEXP (value, 0), subtarget);
5679 op2 = force_operand (op2, NULL_RTX);
5683 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5685 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5686 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5687 target, 1, OPTAB_LIB_WIDEN);
5689 return expand_divmod (0,
5690 FLOAT_MODE_P (GET_MODE (value))
5691 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5692 GET_MODE (value), op1, op2, target, 0);
5695 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5699 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5703 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5707 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5708 target, 0, OPTAB_LIB_WIDEN);
5711 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5712 target, 1, OPTAB_LIB_WIDEN);
5715 if (UNARY_P (value))
5717 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5718 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5721 #ifdef INSN_SCHEDULING
5722 /* On machines that have insn scheduling, we want all memory references to be
5723 explicit, so we need to deal with such paradoxical SUBREGs. */
5724 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5725 && (GET_MODE_SIZE (GET_MODE (value))
5726 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5728 = simplify_gen_subreg (GET_MODE (value),
5729 force_reg (GET_MODE (SUBREG_REG (value)),
5730 force_operand (SUBREG_REG (value),
5732 GET_MODE (SUBREG_REG (value)),
5733 SUBREG_BYTE (value));
5739 /* Subroutine of expand_expr: return nonzero iff there is no way that
5740 EXP can reference X, which is being modified. TOP_P is nonzero if this
5741 call is going to be used to determine whether we need a temporary
5742 for EXP, as opposed to a recursive call to this function.
5744 It is always safe for this routine to return zero since it merely
5745 searches for optimization opportunities. */
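/* For instance, expand_operands below relies on this: a return value
   of 1 lets it expand one operand directly into TARGET, while a
   conservative 0 merely makes it fall back to a temporary.  */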
5748 safe_from_p (rtx x, tree exp, int top_p)
5752 static tree save_expr_list;
5755 /* If EXP has varying size, we MUST use a target since we currently
5756 have no way of allocating temporaries of variable size
5757 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5758 So we assume here that something at a higher level has prevented a
5759 clash. This is somewhat bogus, but the best we can do. Only
5760 do this when X is BLKmode and when we are at the top level. */
5761 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5762 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5763 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5764 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5765 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5767 && GET_MODE (x) == BLKmode)
5768 /* If X is in the outgoing argument area, it is always safe. */
5769 || (GET_CODE (x) == MEM
5770 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5771 || (GET_CODE (XEXP (x, 0)) == PLUS
5772 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5775 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5776 find the underlying pseudo. */
5777 if (GET_CODE (x) == SUBREG)
5780 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5784 /* A SAVE_EXPR might appear many times in the expression passed to the
5785 top-level safe_from_p call, and if it has a complex subexpression,
5786 examining it multiple times could result in a combinatorial explosion.
5787 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5788 with optimization took about 28 minutes to compile -- even though it was
5789 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5790 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5791 we have processed. Note that the only test of top_p was above. */
5800 rtn = safe_from_p (x, exp, 0);
5802 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5803 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5808 /* Now look at our tree code and possibly recurse. */
5809 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5812 exp_rtl = DECL_RTL_IF_SET (exp);
5819 if (TREE_CODE (exp) == TREE_LIST)
5823 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5825 exp = TREE_CHAIN (exp);
5828 if (TREE_CODE (exp) != TREE_LIST)
5829 return safe_from_p (x, exp, 0);
5832 else if (TREE_CODE (exp) == ERROR_MARK)
5833 return 1; /* An already-visited SAVE_EXPR? */
5839 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5844 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5848 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5849 the expression. If it is set, we conflict iff we are that rtx or
5850 both are in memory. Otherwise, we check all operands of the
5851 expression recursively. */
5853 switch (TREE_CODE (exp))
5856 /* If the operand is static or we are static, we can't conflict.
5857 Likewise if we don't conflict with the operand at all. */
5858 if (staticp (TREE_OPERAND (exp, 0))
5859 || TREE_STATIC (exp)
5860 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5863 /* Otherwise, the only way this can conflict is if we are taking
5864 the address of a DECL whose address is part of X, which is
5866 exp = TREE_OPERAND (exp, 0);
5869 if (!DECL_RTL_SET_P (exp)
5870 || GET_CODE (DECL_RTL (exp)) != MEM)
5873 exp_rtl = XEXP (DECL_RTL (exp), 0);
5878 if (GET_CODE (x) == MEM
5879 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5880 get_alias_set (exp)))
5885 /* Assume that the call will clobber all hard registers and
5887 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5888 || GET_CODE (x) == MEM)
5893 /* If a sequence exists, we would have to scan every instruction
5894 in the sequence to see if it was safe. This is probably not
5896 if (RTL_EXPR_SEQUENCE (exp))
5899 exp_rtl = RTL_EXPR_RTL (exp);
5902 case WITH_CLEANUP_EXPR:
5903 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5906 case CLEANUP_POINT_EXPR:
5907 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5910 exp_rtl = SAVE_EXPR_RTL (exp);
5914 /* If we've already scanned this, don't do it again. Otherwise,
5915 show we've scanned it and record for clearing the flag if we're
5917 if (TREE_PRIVATE (exp))
5920 TREE_PRIVATE (exp) = 1;
5921 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5923 TREE_PRIVATE (exp) = 0;
5927 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5931 /* The only operand we look at is operand 1. The rest aren't
5932 part of the expression. */
5933 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5939 /* If we have an rtx, we do not need to scan our operands. */
5943 nops = first_rtl_op (TREE_CODE (exp));
5944 for (i = 0; i < nops; i++)
5945 if (TREE_OPERAND (exp, i) != 0
5946 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5949 /* If this is a language-specific tree code, it may require
5950 special handling. */
5951 if ((unsigned int) TREE_CODE (exp)
5952 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5953 && !lang_hooks.safe_from_p (x, exp))
5957 /* If we have an rtl, find any enclosed object. Then see if we conflict
5961 if (GET_CODE (exp_rtl) == SUBREG)
5963 exp_rtl = SUBREG_REG (exp_rtl);
5964 if (GET_CODE (exp_rtl) == REG
5965 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5969 /* If the rtl is X, then it is not safe. Otherwise, it is safe
5970 unless both are memory references and they conflict. */
5971 return ! (rtx_equal_p (x, exp_rtl)
5972 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5973 && true_dependence (exp_rtl, VOIDmode, x,
5974 rtx_addr_varies_p)));
5977 /* If we reach here, it is safe. */
5981 /* Subroutine of expand_expr: return rtx if EXP is a
5982 variable or parameter; else return 0. */
5988 switch (TREE_CODE (exp))
5992 return DECL_RTL (exp);
5998 /* Return the highest power of two that EXP is known to be a multiple of.
5999 This is used in updating alignment of MEMs in array references. */
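/* Illustrative values: for the constant 24 this returns 8 (the lowest
   set bit); for A + B it returns the smaller of the operands' factors,
   so 24 + 20 yields 4.  */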
6001 static unsigned HOST_WIDE_INT
6002 highest_pow2_factor (tree exp)
6004 unsigned HOST_WIDE_INT c0, c1;
6006 switch (TREE_CODE (exp))
6009 /* We can find the lowest bit that's a one. If the low
6010 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6011 We need to handle this case since we can find it in a COND_EXPR,
6012 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6013 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6015 if (TREE_CONSTANT_OVERFLOW (exp))
6016 return BIGGEST_ALIGNMENT;
6019 /* Note: tree_low_cst is intentionally not used here;
6020 we don't care about the upper bits. */
6021 c0 = TREE_INT_CST_LOW (exp);
6023 return c0 ? c0 : BIGGEST_ALIGNMENT;
6027 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6028 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6029 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6030 return MIN (c0, c1);
6033 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6034 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6037 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6039 if (integer_pow2p (TREE_OPERAND (exp, 1))
6040 && host_integerp (TREE_OPERAND (exp, 1), 1))
6042 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6043 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6044 return MAX (1, c0 / c1);
6048 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6050 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6053 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6056 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6057 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6058 return MIN (c0, c1);
6067 /* Similar, except that the alignment requirements of TARGET are
6068 taken into account. Assume it is at least as aligned as its
6069 type, unless it is a COMPONENT_REF in which case the layout of
6070 the structure gives the alignment. */
6072 static unsigned HOST_WIDE_INT
6073 highest_pow2_factor_for_target (tree target, tree exp)
6075 unsigned HOST_WIDE_INT target_align, factor;
6077 factor = highest_pow2_factor (exp);
6078 if (TREE_CODE (target) == COMPONENT_REF)
6079 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6081 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6082 return MAX (factor, target_align);
6085 /* Subroutine of expand_expr. Expand the two operands of a binary
6086 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6087 The value may be stored in TARGET if TARGET is nonzero. The
6088 MODIFIER argument is as documented by expand_expr. */
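/* For instance, when EXP0 and EXP1 are the same tree (as in A + A),
   the operand is expanded only once and *OP1 becomes a copy of *OP0.  */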
6091 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6092 enum expand_modifier modifier)
6094 if (! safe_from_p (target, exp1, 1))
6096 if (operand_equal_p (exp0, exp1, 0))
6098 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6099 *op1 = copy_rtx (*op0);
6103 /* If we need to preserve evaluation order, copy exp0 into its own
6104 temporary variable so that it can't be clobbered by exp1. */
6105 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6106 exp0 = save_expr (exp0);
6107 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6108 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6109 }
6110 }
6113 /* expand_expr: generate code for computing expression EXP.
6114 An rtx for the computed value is returned. The value is never null.
6115 In the case of a void EXP, const0_rtx is returned.
6117 The value may be stored in TARGET if TARGET is nonzero.
6118 TARGET is just a suggestion; callers must assume that
6119 the rtx returned may not be the same as TARGET.
6121 If TARGET is CONST0_RTX, it means that the value will be ignored.
6123 If TMODE is not VOIDmode, it suggests generating the
6124 result in mode TMODE. But this is done only when convenient.
6125 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6126 TMODE is just a suggestion; callers must assume that
6127 the rtx returned may not have mode TMODE.
6129 Note that TARGET may have neither TMODE nor MODE. In that case, it
6130 probably will not be used.
6132 If MODIFIER is EXPAND_SUM then when EXP is an addition
6133 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6134 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6135 products as above, or REG or MEM, or constant.
6136 Ordinarily in such cases we would output mul or add instructions
6137 and then return a pseudo reg containing the sum.
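   For example, under EXPAND_SUM the expression p + 4 may come back as
   (plus (reg ...) (const_int 4)) rather than being forced into a single
   pseudo register, so the caller can fold it into an address.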
6139 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6140 it also marks a label as absolutely required (it can't be dead).
6141 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6142 This is used for outputting expressions used in initializers.
6144 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6145 with a constant address even if that address is not normally legitimate.
6146 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6148 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6149 a call parameter. Such targets require special care as we haven't yet
6150 marked TARGET so that it's safe from being trashed by libcalls. We
6151 don't want to use TARGET for anything but the final result;
6152 intermediate values must go elsewhere. Additionally, calls to
6153 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6155 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6156 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6157 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6158 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6159 recursively. */
6161 rtx
6162 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6163 enum expand_modifier modifier, rtx *alt_rtl)
6164 {
6165 rtx op0, op1, temp;
6166 tree type = TREE_TYPE (exp);
6167 int unsignedp;
6168 enum machine_mode mode;
6169 enum tree_code code = TREE_CODE (exp);
6170 optab this_optab;
6171 rtx subtarget, original_target;
6172 int ignore;
6173 tree context;
6175 /* Handle ERROR_MARK before anybody tries to access its type. */
6176 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6177 {
6178 op0 = CONST0_RTX (tmode);
6179 if (op0 != 0)
6180 return op0;
6181 return const0_rtx;
6182 }
6184 mode = TYPE_MODE (type);
6185 unsignedp = TYPE_UNSIGNED (type);
6187 /* Use subtarget as the target for operand 0 of a binary operation. */
6188 subtarget = get_subtarget (target);
6189 original_target = target;
6190 ignore = (target == const0_rtx
6191 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6192 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6193 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6194 && TREE_CODE (type) == VOID_TYPE));
6196 /* If we are going to ignore this result, we need only do something
6197 if there is a side-effect somewhere in the expression. If there
6198 is, short-circuit the most common cases here. Note that we must
6199 not call expand_expr with anything but const0_rtx in case this
6200 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6202 if (ignore)
6203 {
6204 if (! TREE_SIDE_EFFECTS (exp))
6205 return const0_rtx;
6207 /* Ensure we reference a volatile object even if value is ignored, but
6208 don't do this if all we are doing is taking its address. */
6209 if (TREE_THIS_VOLATILE (exp)
6210 && TREE_CODE (exp) != FUNCTION_DECL
6211 && mode != VOIDmode && mode != BLKmode
6212 && modifier != EXPAND_CONST_ADDRESS)
6214 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6215 if (GET_CODE (temp) == MEM)
6216 temp = copy_to_reg (temp);
6217 return const0_rtx;
6218 }
6220 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6221 || code == INDIRECT_REF || code == BUFFER_REF)
6222 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6223 modifier);
6225 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6226 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6228 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6229 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6230 return const0_rtx;
6231 }
6232 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6233 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6234 /* If the second operand has no side effects, just evaluate
6235 the first. */
6236 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6237 modifier);
6238 else if (code == BIT_FIELD_REF)
6240 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6241 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6242 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6243 return const0_rtx;
6244 }
6246 target = 0;
6247 }
6249 /* If we will do cse, generate all results into pseudo registers
6250 since 1) that allows cse to find more things
6251 and 2) otherwise cse could produce an insn the machine
6252 cannot support. An exception is a CONSTRUCTOR into a multi-word
6253 MEM: that's much more likely to be most efficient into the MEM.
6254 Another is a CALL_EXPR which must return in memory. */
6256 if (! cse_not_expected && mode != BLKmode && target
6257 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6258 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6259 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6260 target = 0;
6262 switch (code)
6263 {
6264 case LABEL_DECL:
6265 {
6266 tree function = decl_function_context (exp);
6267 /* Labels in containing functions, or labels used from initializers,
6268 must be forced. */
6269 if (modifier == EXPAND_INITIALIZER
6270 || (function != current_function_decl
6271 && function != inline_function_decl
6272 && function != 0))
6273 temp = force_label_rtx (exp);
6274 else
6275 temp = label_rtx (exp);
6277 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6278 if (function != current_function_decl
6279 && function != inline_function_decl && function != 0)
6280 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6281 return temp;
6282 }
6284 case PARM_DECL:
6285 if (!DECL_RTL_SET_P (exp))
6286 {
6287 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6288 return CONST0_RTX (mode);
6289 }
6291 /* ... fall through ... */
6293 case VAR_DECL:
6294 /* If a static var's type was incomplete when the decl was written,
6295 but the type is complete now, lay out the decl now. */
6296 if (DECL_SIZE (exp) == 0
6297 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6298 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6299 layout_decl (exp, 0);
6301 /* ... fall through ... */
6303 case FUNCTION_DECL:
6304 case RESULT_DECL:
6305 if (DECL_RTL (exp) == 0)
6306 abort ();
6308 /* Ensure the variable is marked as used even if it doesn't go through
6309 a parser. If it hasn't been used yet, write out an external
6310 definition. */
6311 if (! TREE_USED (exp))
6312 {
6313 assemble_external (exp);
6314 TREE_USED (exp) = 1;
6317 /* Show we haven't gotten RTL for this yet. */
6318 temp = 0;
6320 /* Handle variables inherited from containing functions. */
6321 context = decl_function_context (exp);
6323 /* We treat inline_function_decl as an alias for the current function
6324 because that is the inline function whose vars, types, etc.
6325 are being merged into the current function.
6326 See expand_inline_function. */
6328 if (context != 0 && context != current_function_decl
6329 && context != inline_function_decl
6330 /* If var is static, we don't need a static chain to access it. */
6331 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6332 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6333 {
6334 rtx addr;
6336 /* Mark as non-local and addressable. */
6337 DECL_NONLOCAL (exp) = 1;
6338 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6339 abort ();
6340 lang_hooks.mark_addressable (exp);
6341 if (GET_CODE (DECL_RTL (exp)) != MEM)
6342 abort ();
6343 addr = XEXP (DECL_RTL (exp), 0);
6344 if (GET_CODE (addr) == MEM)
6345 addr
6346 = replace_equiv_address (addr,
6347 fix_lexical_addr (XEXP (addr, 0), exp));
6348 else
6349 addr = fix_lexical_addr (addr, exp);
6351 temp = replace_equiv_address (DECL_RTL (exp), addr);
6354 /* This is the case of an array whose size is to be determined
6355 from its initializer, while the initializer is still being parsed.
6356 See expand_decl. */
6358 else if (GET_CODE (DECL_RTL (exp)) == MEM
6359 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6360 temp = validize_mem (DECL_RTL (exp));
6362 /* If DECL_RTL is memory, we are in the normal case and either
6363 the address is not valid or it is not a register and -fforce-addr
6364 is specified, get the address into a register. */
6366 else if (GET_CODE (DECL_RTL (exp)) == MEM
6367 && modifier != EXPAND_CONST_ADDRESS
6368 && modifier != EXPAND_SUM
6369 && modifier != EXPAND_INITIALIZER
6370 && (! memory_address_p (DECL_MODE (exp),
6371 XEXP (DECL_RTL (exp), 0))
6372 || (flag_force_addr
6373 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6374 {
6375 if (alt_rtl)
6376 *alt_rtl = DECL_RTL (exp);
6377 temp = replace_equiv_address (DECL_RTL (exp),
6378 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6381 /* If we got something, return it. But first, set the alignment
6382 if the address is a register. */
6383 if (temp != 0)
6384 {
6385 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6386 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6388 return temp;
6389 }
6391 /* If the mode of DECL_RTL does not match that of the decl, it
6392 must be a promoted value. We return a SUBREG of the wanted mode,
6393 but mark it so that we know that it was already extended. */
6395 if (GET_CODE (DECL_RTL (exp)) == REG
6396 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6398 /* Get the signedness used for this variable. Ensure we get the
6399 same mode we got when the variable was declared. */
6400 if (GET_MODE (DECL_RTL (exp))
6401 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6402 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6403 abort ();
6405 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6406 SUBREG_PROMOTED_VAR_P (temp) = 1;
6407 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6408 return temp;
6409 }
6411 return DECL_RTL (exp);
6413 case INTEGER_CST:
6414 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6415 TREE_INT_CST_HIGH (exp), mode);
6417 /* ??? If overflow is set, fold will have done an incomplete job,
6418 which can result in (plus xx (const_int 0)), which can get
6419 simplified by validate_replace_rtx during virtual register
6420 instantiation, which can result in unrecognizable insns.
6421 Avoid this by forcing all overflows into registers. */
6422 if (TREE_CONSTANT_OVERFLOW (exp)
6423 && modifier != EXPAND_INITIALIZER)
6424 temp = force_reg (mode, temp);
6425 return temp;
6428 case VECTOR_CST:
6429 return const_vector_from_tree (exp);
6431 case CONST_DECL:
6432 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6434 case REAL_CST:
6435 /* If optimized, generate immediate CONST_DOUBLE
6436 which will be turned into memory by reload if necessary.
6438 We used to force a register so that loop.c could see it. But
6439 this does not allow gen_* patterns to perform optimizations with
6440 the constants. It also produces two insns in cases like "x = 1.0;".
6441 On most machines, floating-point constants are not permitted in
6442 many insns, so we'd end up copying it to a register in any case.
6444 Now, we do the copying in expand_binop, if appropriate. */
6445 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6446 TYPE_MODE (TREE_TYPE (exp)));
6448 case COMPLEX_CST:
6449 /* Handle evaluating a complex constant in a CONCAT target. */
6450 if (original_target && GET_CODE (original_target) == CONCAT)
6452 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6453 rtx rtarg, itarg;
6455 rtarg = XEXP (original_target, 0);
6456 itarg = XEXP (original_target, 1);
6458 /* Move the real and imaginary parts separately. */
6459 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6460 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6462 if (op0 != rtarg)
6463 emit_move_insn (rtarg, op0);
6464 if (op1 != itarg)
6465 emit_move_insn (itarg, op1);
6467 return original_target;
6468 }
6470 /* ... fall through ... */
6472 case STRING_CST:
6473 temp = output_constant_def (exp, 1);
6475 /* temp contains a constant address.
6476 On RISC machines where a constant address isn't valid,
6477 make some insns to get that address into a register. */
6478 if (modifier != EXPAND_CONST_ADDRESS
6479 && modifier != EXPAND_INITIALIZER
6480 && modifier != EXPAND_SUM
6481 && (! memory_address_p (mode, XEXP (temp, 0))
6482 || flag_force_addr))
6483 return replace_equiv_address (temp,
6484 copy_rtx (XEXP (temp, 0)));
6485 return temp;
6487 case EXPR_WITH_FILE_LOCATION:
6488 {
6489 rtx to_return;
6490 struct file_stack fs;
6492 fs.location = input_location;
6493 fs.next = expr_wfl_stack;
6494 input_filename = EXPR_WFL_FILENAME (exp);
6495 input_line = EXPR_WFL_LINENO (exp);
6496 expr_wfl_stack = &fs;
6497 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6498 emit_line_note (input_location);
6499 /* Possibly avoid switching back and forth here. */
6500 to_return = expand_expr (EXPR_WFL_NODE (exp),
6501 (ignore ? const0_rtx : target),
6502 tmode, modifier);
6503 if (expr_wfl_stack != &fs)
6504 abort ();
6505 input_location = fs.location;
6506 expr_wfl_stack = fs.next;
6507 return to_return;
6508 }
6510 case SAVE_EXPR:
6511 context = decl_function_context (exp);
6513 /* If this SAVE_EXPR was at global context, assume we are an
6514 initialization function and move it into our context. */
6515 if (context == 0)
6516 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6518 /* We treat inline_function_decl as an alias for the current function
6519 because that is the inline function whose vars, types, etc.
6520 are being merged into the current function.
6521 See expand_inline_function. */
6522 if (context == current_function_decl || context == inline_function_decl)
6523 context = 0;
6525 /* If this is non-local, handle it. */
6526 if (context)
6527 {
6528 /* The following call just exists to abort if the context is
6529 not of a containing function. */
6530 find_function_data (context);
6532 temp = SAVE_EXPR_RTL (exp);
6533 if (temp && GET_CODE (temp) == REG)
6535 put_var_into_stack (exp, /*rescan=*/true);
6536 temp = SAVE_EXPR_RTL (exp);
6538 if (temp == 0 || GET_CODE (temp) != MEM)
6539 abort ();
6540 return
6541 replace_equiv_address (temp,
6542 fix_lexical_addr (XEXP (temp, 0), exp));
6543 }
6544 if (SAVE_EXPR_RTL (exp) == 0)
6546 if (mode == VOIDmode)
6547 temp = const0_rtx;
6548 else
6549 temp = assign_temp (build_qualified_type (type,
6550 (TYPE_QUALS (type)
6551 | TYPE_QUAL_CONST)),
6552 3, 0, 0);
6554 SAVE_EXPR_RTL (exp) = temp;
6555 if (!optimize && GET_CODE (temp) == REG)
6556 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6557 save_expr_regs);
6559 /* If the mode of TEMP does not match that of the expression, it
6560 must be a promoted value. We pass store_expr a SUBREG of the
6561 wanted mode but mark it so that we know that it was already
6562 extended. */
6564 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6566 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6567 promote_mode (type, mode, &unsignedp, 0);
6568 SUBREG_PROMOTED_VAR_P (temp) = 1;
6569 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6572 if (temp == const0_rtx)
6573 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6574 else
6575 store_expr (TREE_OPERAND (exp, 0), temp,
6576 modifier == EXPAND_STACK_PARM ? 2 : 0);
6578 TREE_USED (exp) = 1;
6581 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6582 must be a promoted value. We return a SUBREG of the wanted mode,
6583 but mark it so that we know that it was already extended. */
6585 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6586 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6588 /* Compute the signedness and make the proper SUBREG. */
6589 promote_mode (type, mode, &unsignedp, 0);
6590 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6591 SUBREG_PROMOTED_VAR_P (temp) = 1;
6592 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6593 return temp;
6594 }
6596 return SAVE_EXPR_RTL (exp);
6598 case UNSAVE_EXPR:
6599 {
6600 rtx temp;
6601 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6602 TREE_OPERAND (exp, 0)
6603 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6604 return temp;
6605 }
6607 case GOTO_EXPR:
6608 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6609 expand_goto (TREE_OPERAND (exp, 0));
6610 else
6611 expand_computed_goto (TREE_OPERAND (exp, 0));
6612 return const0_rtx;
6614 case EXIT_EXPR:
6615 expand_exit_loop_if_false (NULL,
6616 invert_truthvalue (TREE_OPERAND (exp, 0)));
6617 return const0_rtx;
6619 case LABELED_BLOCK_EXPR:
6620 if (LABELED_BLOCK_BODY (exp))
6621 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6622 /* Should perhaps use expand_label, but this is simpler and safer. */
6623 do_pending_stack_adjust ();
6624 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6625 return const0_rtx;
6627 case EXIT_BLOCK_EXPR:
6628 if (EXIT_BLOCK_RETURN (exp))
6629 sorry ("returned value in block_exit_expr");
6630 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6631 return const0_rtx;
6633 case LOOP_EXPR:
6634 push_temp_slots ();
6635 expand_start_loop (1);
6636 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6637 expand_end_loop ();
6638 pop_temp_slots ();
6640 return const0_rtx;
6642 case BIND_EXPR:
6643 {
6644 tree vars = TREE_OPERAND (exp, 0);
6646 /* Need to open a binding contour here because
6647 if there are any cleanups they must be contained here. */
6648 expand_start_bindings (2);
6650 /* Mark the corresponding BLOCK for output in its proper place. */
6651 if (TREE_OPERAND (exp, 2) != 0
6652 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6653 lang_hooks.decls.insert_block (TREE_OPERAND (exp, 2));
6655 /* If VARS have not yet been expanded, expand them now. */
6656 while (vars)
6657 {
6658 if (!DECL_RTL_SET_P (vars))
6659 expand_decl (vars);
6660 expand_decl_init (vars);
6661 vars = TREE_CHAIN (vars);
6662 }
6664 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6666 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6668 return temp;
6669 }
6671 case RTL_EXPR:
6672 if (RTL_EXPR_SEQUENCE (exp))
6674 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6675 abort ();
6676 emit_insn (RTL_EXPR_SEQUENCE (exp));
6677 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6679 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6680 free_temps_for_rtl_expr (exp);
6681 if (alt_rtl)
6682 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6683 return RTL_EXPR_RTL (exp);
6685 case CONSTRUCTOR:
6686 /* If we don't need the result, just ensure we evaluate any
6687 subexpressions. */
6688 if (ignore)
6689 {
6690 tree elt;
6692 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6693 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6695 return const0_rtx;
6696 }
6698 /* All elts simple constants => refer to a constant in memory. But
6699 if this is a non-BLKmode mode, let it store a field at a time
6700 since that should make a CONST_INT or CONST_DOUBLE when we
6701 fold. Likewise, if we have a target we can use, it is best to
6702 store directly into the target unless the type is large enough
6703 that memcpy will be used. If we are making an initializer and
6704 all operands are constant, put it in memory as well.
6706 FIXME: Avoid trying to fill vector constructors piece-meal.
6707 Output them with output_constant_def below unless we're sure
6708 they're zeros. This should go away when vector initializers
6709 are treated like VECTOR_CST instead of arrays.
6711 else if ((TREE_STATIC (exp)
6712 && ((mode == BLKmode
6713 && ! (target != 0 && safe_from_p (target, exp, 1)))
6714 || TREE_ADDRESSABLE (exp)
6715 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6716 && (! MOVE_BY_PIECES_P
6717 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6718 TYPE_ALIGN (type)))
6719 && ((TREE_CODE (type) == VECTOR_TYPE
6720 && !is_zeros_p (exp))
6721 || ! mostly_zeros_p (exp)))))
6722 || ((modifier == EXPAND_INITIALIZER
6723 || modifier == EXPAND_CONST_ADDRESS)
6724 && TREE_CONSTANT (exp)))
6726 rtx constructor = output_constant_def (exp, 1);
6728 if (modifier != EXPAND_CONST_ADDRESS
6729 && modifier != EXPAND_INITIALIZER
6730 && modifier != EXPAND_SUM)
6731 constructor = validize_mem (constructor);
6732 return constructor;
6733 }
6735 else
6736 {
6737 /* Handle calls that pass values in multiple non-contiguous
6738 locations. The Irix 6 ABI has examples of this. */
6739 if (target == 0 || ! safe_from_p (target, exp, 1)
6740 || GET_CODE (target) == PARALLEL
6741 || modifier == EXPAND_STACK_PARM)
6742 target
6743 = assign_temp (build_qualified_type (type,
6744 (TYPE_QUALS (type)
6745 | (TREE_READONLY (exp)
6746 * TYPE_QUAL_CONST))),
6747 0, TREE_ADDRESSABLE (exp), 1);
6749 store_constructor (exp, target, 0, int_expr_size (exp));
6750 return target;
6751 }
6753 case INDIRECT_REF:
6754 {
6755 tree exp1 = TREE_OPERAND (exp, 0);
6756 tree index;
6757 tree string = string_constant (exp1, &index);
6759 /* Try to optimize reads from const strings. */
6760 if (string
6761 && TREE_CODE (string) == STRING_CST
6762 && TREE_CODE (index) == INTEGER_CST
6763 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6764 && GET_MODE_CLASS (mode) == MODE_INT
6765 && GET_MODE_SIZE (mode) == 1
6766 && modifier != EXPAND_WRITE)
6767 return gen_int_mode (TREE_STRING_POINTER (string)
6768 [TREE_INT_CST_LOW (index)], mode);
6770 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6771 op0 = memory_address (mode, op0);
6772 temp = gen_rtx_MEM (mode, op0);
6773 set_mem_attributes (temp, exp, 0);
6775 /* If we are writing to this object and its type is a record with
6776 readonly fields, we must mark it as readonly so it will
6777 conflict with readonly references to those fields. */
6778 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6779 RTX_UNCHANGING_P (temp) = 1;
6780 return temp;
6781 }
6783 case ARRAY_REF:
6785 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6786 abort ();
6788 {
6789 tree array = TREE_OPERAND (exp, 0);
6790 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6791 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6792 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6795 /* Optimize the special-case of a zero lower bound.
6797 We convert the low_bound to sizetype to avoid some problems
6798 with constant folding. (E.g. suppose the lower bound is 1,
6799 and its mode is QI. Without the conversion, (ARRAY
6800 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6801 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6803 if (! integer_zerop (low_bound))
6804 index = size_diffop (index, convert (sizetype, low_bound));
6806 /* Fold an expression like: "foo"[2].
6807 This is not done in fold so it won't happen inside &.
6808 Don't fold if this is for wide characters since it's too
6809 difficult to do correctly and this is a very rare case. */
6811 if (modifier != EXPAND_CONST_ADDRESS
6812 && modifier != EXPAND_INITIALIZER
6813 && modifier != EXPAND_MEMORY
6814 && TREE_CODE (array) == STRING_CST
6815 && TREE_CODE (index) == INTEGER_CST
6816 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6817 && GET_MODE_CLASS (mode) == MODE_INT
6818 && GET_MODE_SIZE (mode) == 1)
6819 return gen_int_mode (TREE_STRING_POINTER (array)
6820 [TREE_INT_CST_LOW (index)], mode);
6822 /* If this is a constant index into a constant array,
6823 just get the value from the array. Handle both the cases when
6824 we have an explicit constructor and when our operand is a variable
6825 that was declared const. */
6827 if (modifier != EXPAND_CONST_ADDRESS
6828 && modifier != EXPAND_INITIALIZER
6829 && modifier != EXPAND_MEMORY
6830 && TREE_CODE (array) == CONSTRUCTOR
6831 && ! TREE_SIDE_EFFECTS (array)
6832 && TREE_CODE (index) == INTEGER_CST
6833 && 0 > compare_tree_int (index,
6834 list_length (CONSTRUCTOR_ELTS
6835 (TREE_OPERAND (exp, 0)))))
6836 {
6837 tree elem;
6838 unsigned HOST_WIDE_INT i;
6839 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6840 i = TREE_INT_CST_LOW (index);
6841 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6842 ;
6844 if (elem)
6845 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6846 modifier);
6847 }
6849 else if (optimize >= 1
6850 && modifier != EXPAND_CONST_ADDRESS
6851 && modifier != EXPAND_INITIALIZER
6852 && modifier != EXPAND_MEMORY
6853 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6854 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6855 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6856 && targetm.binds_local_p (array))
6858 if (TREE_CODE (index) == INTEGER_CST)
6860 tree init = DECL_INITIAL (array);
6862 if (TREE_CODE (init) == CONSTRUCTOR)
6863 {
6864 tree elem;
6866 for (elem = CONSTRUCTOR_ELTS (init);
6867 (elem
6868 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6869 elem = TREE_CHAIN (elem))
6870 ;
6872 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6873 return expand_expr (fold (TREE_VALUE (elem)), target,
6874 tmode, modifier);
6875 }
6876 else if (TREE_CODE (init) == STRING_CST
6877 && 0 > compare_tree_int (index,
6878 TREE_STRING_LENGTH (init)))
6880 tree type = TREE_TYPE (TREE_TYPE (init));
6881 enum machine_mode mode = TYPE_MODE (type);
6883 if (GET_MODE_CLASS (mode) == MODE_INT
6884 && GET_MODE_SIZE (mode) == 1)
6885 return gen_int_mode (TREE_STRING_POINTER (init)
6886 [TREE_INT_CST_LOW (index)], mode);
6891 goto normal_inner_ref;
6893 case COMPONENT_REF:
6894 /* If the operand is a CONSTRUCTOR, we can just extract the
6895 appropriate field if it is present. */
6896 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6897 {
6898 tree elt;
6900 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6901 elt = TREE_CHAIN (elt))
6902 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6903 /* We can normally use the value of the field in the
6904 CONSTRUCTOR. However, if this is a bitfield in
6905 an integral mode that we can fit in a HOST_WIDE_INT,
6906 we must mask only the number of bits in the bitfield,
6907 since this is done implicitly by the constructor. If
6908 the bitfield does not meet either of those conditions,
6909 we can't do this optimization. */
6910 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6911 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6912 == MODE_INT)
6913 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6914 <= HOST_BITS_PER_WIDE_INT))))
6916 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6917 && modifier == EXPAND_STACK_PARM)
6918 target = 0;
6919 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6920 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6922 HOST_WIDE_INT bitsize
6923 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6924 enum machine_mode imode
6925 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6927 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6929 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6930 op0 = expand_and (imode, op0, op1, target);
6931 }
6932 else
6933 {
6934 tree count
6935 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6936 0);
6938 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6939 target, 0);
6940 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6941 target, 0);
6942 }
6944 return op0;
6945 }
6946 }
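/* Worked example of the sign-extension above: for a 5-bit signed field
   in SImode, count = 32 - 5 = 27; shifting left by 27 and then
   arithmetically right by 27 (unsignedp == 0) replicates the sign bit. */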
6948 goto normal_inner_ref;
6951 case ARRAY_RANGE_REF:
6952 normal_inner_ref:
6953 {
6954 enum machine_mode mode1;
6955 HOST_WIDE_INT bitsize, bitpos;
6956 tree offset;
6957 int volatilep = 0;
6958 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6959 &mode1, &unsignedp, &volatilep);
6960 rtx orig_op0;
6962 /* If we got back the original object, something is wrong. Perhaps
6963 we are evaluating an expression too early. In any event, don't
6964 infinitely recurse. */
6965 if (tem == exp)
6966 abort ();
6968 /* If TEM's type is a union of variable size, pass TARGET to the inner
6969 computation, since it will need a temporary and TARGET is known
6970 to be usable. This occurs in unchecked conversion in Ada. */
6972 orig_op0 = op0
6973 = expand_expr (tem,
6974 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6975 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6976 != INTEGER_CST)
6977 && modifier != EXPAND_STACK_PARM
6978 ? target : NULL_RTX),
6979 VOIDmode,
6980 (modifier == EXPAND_INITIALIZER
6981 || modifier == EXPAND_CONST_ADDRESS
6982 || modifier == EXPAND_STACK_PARM)
6983 ? modifier : EXPAND_NORMAL);
6985 /* If this is a constant, put it into a register if it is a
6986 legitimate constant and OFFSET is 0 and memory if it isn't. */
6987 if (CONSTANT_P (op0))
6989 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6990 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6991 && offset == 0)
6992 op0 = force_reg (mode, op0);
6993 else
6994 op0 = validize_mem (force_const_mem (mode, op0));
6997 /* Otherwise, if this object not in memory and we either have an
6998 offset or a BLKmode result, put it there. This case can't occur in
6999 C, but can in Ada if we have unchecked conversion of an expression
7000 from a scalar type to an array or record type or for an
7001 ARRAY_RANGE_REF whose type is BLKmode. */
7002 else if (GET_CODE (op0) != MEM
7003 && (offset != 0
7004 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7006 /* If the operand is a SAVE_EXPR, we can deal with this by
7007 forcing the SAVE_EXPR into memory. */
7008 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7010 put_var_into_stack (TREE_OPERAND (exp, 0),
7011 /*rescan=*/true);
7012 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7013 }
7014 else
7015 {
7016 tree nt
7017 = build_qualified_type (TREE_TYPE (tem),
7018 (TYPE_QUALS (TREE_TYPE (tem))
7019 | TYPE_QUAL_CONST));
7020 rtx memloc = assign_temp (nt, 1, 1, 1);
7022 emit_move_insn (memloc, op0);
7023 op0 = memloc;
7024 }
7027 if (offset != 0)
7028 {
7029 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7030 EXPAND_SUM);
7032 if (GET_CODE (op0) != MEM)
7033 abort ();
7035 #ifdef POINTERS_EXTEND_UNSIGNED
7036 if (GET_MODE (offset_rtx) != Pmode)
7037 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7038 #else
7039 if (GET_MODE (offset_rtx) != ptr_mode)
7040 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7041 #endif
7043 if (GET_MODE (op0) == BLKmode
7044 /* A constant address in OP0 can have VOIDmode, we must
7045 not try to call force_reg in that case. */
7046 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7047 && bitsize != 0
7048 && (bitpos % bitsize) == 0
7049 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7050 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7051 {
7052 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7053 bitpos = 0;
7054 }
7055 else
7056 op0 = offset_address (op0, offset_rtx,
7057 highest_pow2_factor (offset));
7060 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7061 record its alignment as BIGGEST_ALIGNMENT. */
7062 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7063 && is_aligning_offset (offset, tem))
7064 set_mem_align (op0, BIGGEST_ALIGNMENT);
7066 /* Don't forget about volatility even if this is a bitfield. */
7067 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7069 if (op0 == orig_op0)
7070 op0 = copy_rtx (op0);
7072 MEM_VOLATILE_P (op0) = 1;
7075 /* The following code doesn't handle CONCAT.
7076 Assume only bitpos == 0 can be used for CONCAT, due to
7077 one-element arrays having the same mode as their element. */
7078 if (GET_CODE (op0) == CONCAT)
7080 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7081 abort ();
7082 op0 = XEXP (op0, 0);
7083 }
7085 /* In cases where an aligned union has an unaligned object
7086 as a field, we might be extracting a BLKmode value from
7087 an integer-mode (e.g., SImode) object. Handle this case
7088 by doing the extract into an object as wide as the field
7089 (which we know to be the width of a basic mode), then
7090 storing into memory, and changing the mode to BLKmode. */
7091 if (mode1 == VOIDmode
7092 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7093 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7094 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7095 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7096 && modifier != EXPAND_CONST_ADDRESS
7097 && modifier != EXPAND_INITIALIZER)
7098 /* If the field isn't aligned enough to fetch as a memref,
7099 fetch it as a bit field. */
7100 || (mode1 != BLKmode
7101 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7102 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7103 || (GET_CODE (op0) == MEM
7104 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7105 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7106 && ((modifier == EXPAND_CONST_ADDRESS
7107 || modifier == EXPAND_INITIALIZER)
7108 ? STRICT_ALIGNMENT
7109 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7110 || (bitpos % BITS_PER_UNIT != 0)))
7111 /* If the type and the field are a constant size and the
7112 size of the type isn't the same size as the bitfield,
7113 we must use bitfield operations. */
7114 || (bitsize >= 0
7115 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7116 == INTEGER_CST)
7117 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7118 bitsize)))
7119 {
7120 enum machine_mode ext_mode = mode;
7122 if (ext_mode == BLKmode
7123 && ! (target != 0 && GET_CODE (op0) == MEM
7124 && GET_CODE (target) == MEM
7125 && bitpos % BITS_PER_UNIT == 0))
7126 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7128 if (ext_mode == BLKmode)
7129 {
7130 if (target == 0)
7131 target = assign_temp (type, 0, 1, 1);
7133 if (bitsize == 0)
7134 return target;
7136 /* In this case, BITPOS must start at a byte boundary and
7137 TARGET, if specified, must be a MEM. */
7138 if (GET_CODE (op0) != MEM
7139 || (target != 0 && GET_CODE (target) != MEM)
7140 || bitpos % BITS_PER_UNIT != 0)
7141 abort ();
7143 emit_block_move (target,
7144 adjust_address (op0, VOIDmode,
7145 bitpos / BITS_PER_UNIT),
7146 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7147 / BITS_PER_UNIT),
7148 (modifier == EXPAND_STACK_PARM
7149 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7151 return target;
7152 }
7154 op0 = validize_mem (op0);
7156 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7157 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7159 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7160 (modifier == EXPAND_STACK_PARM
7161 ? NULL_RTX : target),
7162 ext_mode, ext_mode,
7163 int_size_in_bytes (TREE_TYPE (tem)));
7165 /* If the result is a record type and BITSIZE is narrower than
7166 the mode of OP0, an integral mode, and this is a big endian
7167 machine, we must put the field into the high-order bits. */
7168 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7169 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7170 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7171 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7172 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7173 - bitsize),
7174 op0, 1);
7176 /* If the result type is BLKmode, store the data into a temporary
7177 of the appropriate type, but with the mode corresponding to the
7178 mode for the data we have (op0's mode). It's tempting to make
7179 this a constant type, since we know it's only being stored once,
7180 but that can cause problems if we are taking the address of this
7181 COMPONENT_REF because the MEM of any reference via that address
7182 will have flags corresponding to the type, which will not
7183 necessarily be constant. */
7184 if (mode == BLKmode)
7185 {
7186 rtx new
7187 = assign_stack_temp_for_type
7188 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7190 emit_move_insn (new, op0);
7191 op0 = copy_rtx (new);
7192 PUT_MODE (op0, BLKmode);
7193 set_mem_attributes (op0, exp, 1);
7194 }
7196 return op0;
7197 }
7199 /* If the result is BLKmode, use that to access the object
7200 now as well. */
7201 if (mode == BLKmode)
7202 mode1 = BLKmode;
7204 /* Get a reference to just this component. */
7205 if (modifier == EXPAND_CONST_ADDRESS
7206 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7207 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7209 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7211 if (op0 == orig_op0)
7212 op0 = copy_rtx (op0);
7214 set_mem_attributes (op0, exp, 0);
7215 if (GET_CODE (XEXP (op0, 0)) == REG)
7216 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7218 MEM_VOLATILE_P (op0) |= volatilep;
7219 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7220 || modifier == EXPAND_CONST_ADDRESS
7221 || modifier == EXPAND_INITIALIZER)
7222 return op0;
7223 else if (target == 0)
7224 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7226 convert_move (target, op0, unsignedp);
7227 return target;
7228 }
7230 case VTABLE_REF:
7231 {
7232 rtx insn, before = get_last_insn (), vtbl_ref;
7234 /* Evaluate the interior expression. */
7235 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7236 VOIDmode, EXPAND_NORMAL);
7238 /* Get or create an instruction off which to hang a note. */
7239 if (REG_P (subtarget))
7240 {
7241 target = subtarget;
7242 insn = get_last_insn ();
7243 if (insn == before)
7244 abort ();
7245 if (! INSN_P (insn))
7246 insn = prev_nonnote_insn (insn);
7247 }
7248 else
7249 {
7250 target = gen_reg_rtx (GET_MODE (subtarget));
7251 insn = emit_move_insn (target, subtarget);
7254 /* Collect the data for the note. */
7255 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7256 vtbl_ref = plus_constant (vtbl_ref,
7257 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7258 /* Discard the initial CONST that was added. */
7259 vtbl_ref = XEXP (vtbl_ref, 0);
7261 REG_NOTES (insn)
7262 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7264 return target;
7265 }
7267 /* Intended for a reference to a buffer of a file-object in Pascal.
7268 But it's not certain that a special tree code will really be
7269 necessary for these. INDIRECT_REF might work for them. */
7270 case BUFFER_REF:
7271 abort ();
7273 case IN_EXPR:
7274 {
7275 /* Pascal set IN expression.
7277 Algorithm:
7278 rlo = set_low - (set_low%bits_per_word);
7279 the_word = set [ (index - rlo)/bits_per_word ];
7280 bit_index = index % bits_per_word;
7281 bitmask = 1 << bit_index;
7282 return !!(the_word & bitmask); */
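/* Note that the expansion below actually works a byte at a time: the
   divisions and masks use BITS_PER_UNIT and byte_mode rather than whole
   words, but the algorithm is the word-based one sketched above. */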
7284 tree set = TREE_OPERAND (exp, 0);
7285 tree index = TREE_OPERAND (exp, 1);
7286 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7287 tree set_type = TREE_TYPE (set);
7288 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7289 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7290 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7291 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7292 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7293 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7294 rtx setaddr = XEXP (setval, 0);
7295 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7296 rtx rlow;
7297 rtx diff, quo, rem, addr, bit, result;
7299 /* If domain is empty, answer is no. Likewise if index is constant
7300 and out of bounds. */
7301 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7302 && TREE_CODE (set_low_bound) == INTEGER_CST
7303 && tree_int_cst_lt (set_high_bound, set_low_bound))
7304 || (TREE_CODE (index) == INTEGER_CST
7305 && TREE_CODE (set_low_bound) == INTEGER_CST
7306 && tree_int_cst_lt (index, set_low_bound))
7307 || (TREE_CODE (set_high_bound) == INTEGER_CST
7308 && TREE_CODE (index) == INTEGER_CST
7309 && tree_int_cst_lt (set_high_bound, index))))
7310 return const0_rtx;
7312 if (target == 0)
7313 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7315 /* If we get here, we have to generate the code for both cases
7316 (in range and out of range). */
7318 op0 = gen_label_rtx ();
7319 op1 = gen_label_rtx ();
7321 if (! (GET_CODE (index_val) == CONST_INT
7322 && GET_CODE (lo_r) == CONST_INT))
7323 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7324 GET_MODE (index_val), iunsignedp, op1);
7326 if (! (GET_CODE (index_val) == CONST_INT
7327 && GET_CODE (hi_r) == CONST_INT))
7328 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7329 GET_MODE (index_val), iunsignedp, op1);
7331 /* Calculate the element number of bit zero in the first word
7332 of the set. */
7333 if (GET_CODE (lo_r) == CONST_INT)
7334 rlow = GEN_INT (INTVAL (lo_r)
7335 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7337 rlow = expand_binop (index_mode, and_optab, lo_r,
7338 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7339 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7341 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7342 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7344 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7345 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7346 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7347 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7349 addr = memory_address (byte_mode,
7350 expand_binop (index_mode, add_optab, diff,
7351 setaddr, NULL_RTX, iunsignedp,
7354 /* Extract the bit we want to examine. */
7355 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7356 gen_rtx_MEM (byte_mode, addr),
7357 make_tree (TREE_TYPE (index), rem),
7359 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7360 GET_MODE (target) == byte_mode ? target : 0,
7361 1, OPTAB_LIB_WIDEN);
7363 if (result != target)
7364 convert_move (target, result, 1);
7366 /* Output the code to handle the out-of-range case. */
7367 emit_jump (op0);
7368 emit_label (op1);
7369 emit_move_insn (target, const0_rtx);
7370 emit_label (op0);
7372 return target;
7373 }
7374 case WITH_CLEANUP_EXPR:
7375 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7377 WITH_CLEANUP_EXPR_RTL (exp)
7378 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7379 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7380 CLEANUP_EH_ONLY (exp));
7382 /* That's it for this cleanup. */
7383 TREE_OPERAND (exp, 1) = 0;
7385 return WITH_CLEANUP_EXPR_RTL (exp);
7387 case CLEANUP_POINT_EXPR:
7389 /* Start a new binding layer that will keep track of all cleanup
7390 actions to be performed. */
7391 expand_start_bindings (2);
7393 target_temp_slot_level = temp_slot_level;
7395 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7396 /* If we're going to use this value, load it up now. */
7397 if (! ignore)
7398 op0 = force_not_mem (op0);
7399 preserve_temp_slots (op0);
7400 expand_end_bindings (NULL_TREE, 0, 0);
7401 }
7402 return op0;
7404 case CALL_EXPR:
7405 /* Check for a built-in function. */
7406 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7407 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7408 == FUNCTION_DECL)
7409 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7411 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7412 == BUILT_IN_FRONTEND)
7413 /* ??? Use (*fun) form because expand_expr is a macro. */
7414 return (*lang_hooks.expand_expr) (exp, original_target,
7415 tmode, modifier,
7416 alt_rtl);
7417 else
7418 return expand_builtin (exp, target, subtarget, tmode, ignore);
7419 }
7421 return expand_call (exp, target, ignore);
7423 case NON_LVALUE_EXPR:
7424 case NOP_EXPR:
7425 case CONVERT_EXPR:
7426 case REFERENCE_EXPR:
7427 if (TREE_OPERAND (exp, 0) == error_mark_node)
7428 return const0_rtx;
7430 if (TREE_CODE (type) == UNION_TYPE)
7432 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7434 /* If both input and output are BLKmode, this conversion isn't doing
7435 anything except possibly changing memory attribute. */
7436 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7438 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7439 modifier);
7441 result = copy_rtx (result);
7442 set_mem_attributes (result, exp, 0);
7443 return result;
7444 }
7446 if (target == 0)
7447 {
7448 if (TYPE_MODE (type) != BLKmode)
7449 target = gen_reg_rtx (TYPE_MODE (type));
7450 else
7451 target = assign_temp (type, 0, 1, 1);
7452 }
7454 if (GET_CODE (target) == MEM)
7455 /* Store data into beginning of memory target. */
7456 store_expr (TREE_OPERAND (exp, 0),
7457 adjust_address (target, TYPE_MODE (valtype), 0),
7458 modifier == EXPAND_STACK_PARM ? 2 : 0);
7460 else if (GET_CODE (target) == REG)
7461 /* Store this field into a union of the proper type. */
7462 store_field (target,
7463 MIN ((int_size_in_bytes (TREE_TYPE
7464 (TREE_OPERAND (exp, 0)))
7465 * BITS_PER_UNIT),
7466 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7467 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7468 VOIDmode, 0, type, 0);
7469 else
7470 abort ();
7472 /* Return the entire union. */
7473 return target;
7474 }
7476 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7478 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7479 modifier);
7481 /* If the signedness of the conversion differs and OP0 is
7482 a promoted SUBREG, clear that indication since we now
7483 have to do the proper extension. */
7484 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7485 && GET_CODE (op0) == SUBREG)
7486 SUBREG_PROMOTED_VAR_P (op0) = 0;
7488 return op0;
7489 }
7491 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7492 if (GET_MODE (op0) == mode)
7493 return op0;
7495 /* If OP0 is a constant, just convert it into the proper mode. */
7496 if (CONSTANT_P (op0))
7498 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7499 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7501 if (modifier == EXPAND_INITIALIZER)
7502 return simplify_gen_subreg (mode, op0, inner_mode,
7503 subreg_lowpart_offset (mode,
7506 return convert_modes (mode, inner_mode, op0,
7507 TYPE_UNSIGNED (inner_type));
7510 if (modifier == EXPAND_INITIALIZER)
7511 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7513 if (target == 0)
7514 return
7515 convert_to_mode (mode, op0,
7516 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7517 else
7518 convert_move (target, op0,
7519 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7520 return target;
7522 case VIEW_CONVERT_EXPR:
7523 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7525 /* If the input and output modes are both the same, we are done.
7526 Otherwise, if neither mode is BLKmode and both are integral and within
7527 a word, we can use gen_lowpart. If neither is true, make sure the
7528 operand is in memory and convert the MEM to the new mode. */
7529 if (TYPE_MODE (type) == GET_MODE (op0))
7531 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7532 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7533 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7534 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7535 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7536 op0 = gen_lowpart (TYPE_MODE (type), op0);
7537 else if (GET_CODE (op0) != MEM)
7539 /* If the operand is not a MEM, force it into memory. Since we
7540 are going to be changing the mode of the MEM, don't call
7541 force_const_mem for constants because we don't allow pool
7542 constants to change mode. */
7543 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7545 if (TREE_ADDRESSABLE (exp))
7546 abort ();
7548 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7549 target
7550 = assign_stack_temp_for_type
7551 (TYPE_MODE (inner_type),
7552 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7554 emit_move_insn (target, op0);
7555 op0 = target;
7556 }
7558 /* At this point, OP0 is in the correct mode. If the output type is such
7559 that the operand is known to be aligned, indicate that it is.
7560 Otherwise, we need only be concerned about alignment for non-BLKmode
7561 results. */
7562 if (GET_CODE (op0) == MEM)
7564 op0 = copy_rtx (op0);
7566 if (TYPE_ALIGN_OK (type))
7567 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7568 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7569 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7571 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7572 HOST_WIDE_INT temp_size
7573 = MAX (int_size_in_bytes (inner_type),
7574 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7575 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7576 temp_size, 0, type);
7577 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7579 if (TREE_ADDRESSABLE (exp))
7580 abort ();
7582 if (GET_MODE (op0) == BLKmode)
7583 emit_block_move (new_with_op0_mode, op0,
7584 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7585 (modifier == EXPAND_STACK_PARM
7586 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7588 emit_move_insn (new_with_op0_mode, op0);
7589 op0 = new;
7590 }
7593 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7594 }
7596 return op0;
7598 case PLUS_EXPR:
7599 this_optab = ! unsignedp && flag_trapv
7600 && (GET_MODE_CLASS (mode) == MODE_INT)
7601 ? addv_optab : add_optab;
7603 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7604 something else, make sure we add the register to the constant and
7605 then to the other thing. This case can occur during strength
7606 reduction and doing it this way will produce better code if the
7607 frame pointer or argument pointer is eliminated.
7609 fold-const.c will ensure that the constant is always in the inner
7610 PLUS_EXPR, so the only case we need to do anything about is if
7611 sp, ap, or fp is our second argument, in which case we must swap
7612 the innermost first argument and our second argument. */
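/* Illustration (the trees here are hypothetical): (PLUS (PLUS X C) FP),
   where C is the inner constant and FP is the frame pointer, is
   rewritten below as (PLUS (PLUS FP C) X) so that plus_constant can
   combine FP with C. */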
7614 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7615 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7616 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7617 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7618 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7619 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7621 tree t = TREE_OPERAND (exp, 1);
7623 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7624 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7627 /* If the result is to be ptr_mode and we are adding an integer to
7628 something, we might be forming a constant. So try to use
7629 plus_constant. If it produces a sum and we can't accept it,
7630 use force_operand. This allows P = &ARR[const] to generate
7631 efficient code on machines where a SYMBOL_REF is not a valid
7634 If this is an EXPAND_SUM call, always return the sum. */
7635 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7636 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7638 if (modifier == EXPAND_STACK_PARM)
7639 target = 0;
7640 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7641 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7642 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7643 {
7644 rtx constant_part;
7646 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7647 EXPAND_SUM);
7648 /* Use immed_double_const to ensure that the constant is
7649 truncated according to the mode of OP1, then sign extended
7650 to a HOST_WIDE_INT. Using the constant directly can result
7651 in non-canonical RTL in a 64x32 cross compile. */
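/* Concretely: with a 64-bit HOST_WIDE_INT and a 32-bit operand mode, a
   low part of 0xffffffff must be canonicalized to the CONST_INT -1, not
   kept as the raw 64-bit value 0xffffffff. */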
7652 constant_part
7653 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7654 (HOST_WIDE_INT) 0,
7655 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7656 op1 = plus_constant (op1, INTVAL (constant_part));
7657 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7658 op1 = force_operand (op1, target);
7659 return op1;
7660 }
7662 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7663 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7664 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7665 {
7666 rtx constant_part;
7668 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7669 (modifier == EXPAND_INITIALIZER
7670 ? EXPAND_INITIALIZER : EXPAND_SUM));
7671 if (! CONSTANT_P (op0))
7673 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7674 VOIDmode, modifier);
7675 /* Return a PLUS if modifier says it's OK. */
7676 if (modifier == EXPAND_SUM
7677 || modifier == EXPAND_INITIALIZER)
7678 return simplify_gen_binary (PLUS, mode, op0, op1);
7679 goto binop2;
7680 }
7681 /* Use immed_double_const to ensure that the constant is
7682 truncated according to the mode of OP1, then sign extended
7683 to a HOST_WIDE_INT. Using the constant directly can result
7684 in non-canonical RTL in a 64x32 cross compile. */
7685 constant_part
7686 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7687 (HOST_WIDE_INT) 0,
7688 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7689 op0 = plus_constant (op0, INTVAL (constant_part));
7690 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7691 op0 = force_operand (op0, target);
7692 return op0;
7693 }
7694 }
7696 /* No sense saving up arithmetic to be done
7697 if it's all in the wrong mode to form part of an address.
7698 And force_operand won't know whether to sign-extend or
7699 zero-extend. */
7700 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7701 || mode != ptr_mode)
7703 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7704 subtarget, &op0, &op1, 0);
7705 if (op0 == const0_rtx)
7706 return op1;
7707 if (op1 == const0_rtx)
7708 return op0;
7709 goto binop2;
7710 }
7712 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7713 subtarget, &op0, &op1, modifier);
7714 return simplify_gen_binary (PLUS, mode, op0, op1);
7716 case MINUS_EXPR:
7717 /* For initializers, we are allowed to return a MINUS of two
7718 symbolic constants. Here we handle all cases when both operands
7719 are constant. */
7720 /* Handle difference of two symbolic constants,
7721 for the sake of an initializer. */
7722 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7723 && really_constant_p (TREE_OPERAND (exp, 0))
7724 && really_constant_p (TREE_OPERAND (exp, 1)))
7726 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7727 NULL_RTX, &op0, &op1, modifier);
7729 /* If the last operand is a CONST_INT, use plus_constant of
7730 the negated constant. Else make the MINUS. */
7731 if (GET_CODE (op1) == CONST_INT)
7732 return plus_constant (op0, - INTVAL (op1));
7734 return gen_rtx_MINUS (mode, op0, op1);
7737 this_optab = ! unsignedp && flag_trapv
7738 && (GET_MODE_CLASS(mode) == MODE_INT)
7739 ? subv_optab : sub_optab;
7741 /* No sense saving up arithmetic to be done
7742 if it's all in the wrong mode to form part of an address.
7743 And force_operand won't know whether to sign-extend or
7744 zero-extend. */
7745 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7746 || mode != ptr_mode)
7747 goto binop;
7749 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7750 subtarget, &op0, &op1, modifier);
7752 /* Convert A - const to A + (-const). */
7753 if (GET_CODE (op1) == CONST_INT)
7755 op1 = negate_rtx (mode, op1);
7756 return simplify_gen_binary (PLUS, mode, op0, op1);
7757 }
7759 return simplify_gen_binary (MINUS, mode, op0, op1);
7761 case MULT_EXPR:
7762 /* If first operand is constant, swap them.
7763 Thus the following special case checks need only
7764 check the second operand. */
7765 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7767 tree t1 = TREE_OPERAND (exp, 0);
7768 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7769 TREE_OPERAND (exp, 1) = t1;
7772 /* Attempt to return something suitable for generating an
7773 indexed address, for machines that support that. */
7775 if (modifier == EXPAND_SUM && mode == ptr_mode
7776 && host_integerp (TREE_OPERAND (exp, 1), 0))
7778 tree exp1 = TREE_OPERAND (exp, 1);
7780 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7781 EXPAND_SUM);
7783 if (GET_CODE (op0) != REG)
7784 op0 = force_operand (op0, NULL_RTX);
7785 if (GET_CODE (op0) != REG)
7786 op0 = copy_to_mode_reg (mode, op0);
7788 return gen_rtx_MULT (mode, op0,
7789 gen_int_mode (tree_low_cst (exp1, 0),
7790 TYPE_MODE (TREE_TYPE (exp1))));
7793 if (modifier == EXPAND_STACK_PARM)
7794 target = 0;
7796 /* Check for multiplying things that have been extended
7797 from a narrower type. If this machine supports multiplying
7798 in that narrower type with a result in the desired type,
7799 do it that way, and avoid the explicit type-conversion. */
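/* E.g. two operands extended from a half-width mode can be multiplied
   with one of the widening-multiply optabs (smul_widen_optab or
   umul_widen_optab below), producing the double-width product directly. */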
7800 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7801 && TREE_CODE (type) == INTEGER_TYPE
7802 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7803 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7804 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7805 && int_fits_type_p (TREE_OPERAND (exp, 1),
7806 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7807 /* Don't use a widening multiply if a shift will do. */
7808 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7809 > HOST_BITS_PER_WIDE_INT)
7810 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7811 ||
7812 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7813 && (TYPE_PRECISION (TREE_TYPE
7814 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7815 == TYPE_PRECISION (TREE_TYPE
7816 (TREE_OPERAND
7817 (TREE_OPERAND (exp, 0), 0))))
7818 /* If both operands are extended, they must either both
7819 be zero-extended or both be sign-extended. */
7820 && (TYPE_UNSIGNED (TREE_TYPE
7821 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7822 == TYPE_UNSIGNED (TREE_TYPE
7823 (TREE_OPERAND
7824 (TREE_OPERAND (exp, 0), 0)))))
7826 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7827 enum machine_mode innermode = TYPE_MODE (op0type);
7828 bool zextend_p = TYPE_UNSIGNED (op0type);
7829 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7830 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7832 if (mode == GET_MODE_WIDER_MODE (innermode))
7834 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7836 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7837 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7838 TREE_OPERAND (exp, 1),
7839 NULL_RTX, &op0, &op1, 0);
7840 else
7841 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7842 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7843 NULL_RTX, &op0, &op1, 0);
7844 goto binop2;
7845 }
7846 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7847 && innermode == word_mode)
7848 {
7849 rtx htem, hipart;
7850 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7851 NULL_RTX, VOIDmode, 0);
7852 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7853 op1 = convert_modes (innermode, mode,
7854 expand_expr (TREE_OPERAND (exp, 1),
7855 NULL_RTX, VOIDmode, 0),
7856 unsignedp);
7857 else
7858 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7859 NULL_RTX, VOIDmode, 0);
7860 temp = expand_binop (mode, other_optab, op0, op1, target,
7861 unsignedp, OPTAB_LIB_WIDEN);
7862 hipart = gen_highpart (innermode, temp);
7863 htem = expand_mult_highpart_adjust (innermode, hipart,
7864 op0, op1, hipart,
7865 zextend_p);
7866 if (htem != hipart)
7867 emit_move_insn (hipart, htem);
7868 return temp;
7869 }
7870 }
7872 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7873 subtarget, &op0, &op1, 0);
7874 return expand_mult (mode, op0, op1, target, unsignedp);
7876 case TRUNC_DIV_EXPR:
7877 case FLOOR_DIV_EXPR:
7878 case CEIL_DIV_EXPR:
7879 case ROUND_DIV_EXPR:
7880 case EXACT_DIV_EXPR:
7881 if (modifier == EXPAND_STACK_PARM)
7882 target = 0;
7883 /* Possible optimization: compute the dividend with EXPAND_SUM
7884 then if the divisor is constant can optimize the case
7885 where some terms of the dividend have coeffs divisible by it. */
7886 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7887 subtarget, &op0, &op1, 0);
7888 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7890 case RDIV_EXPR:
7891 /* Emit a/b as a*(1/b). Later, CSE may be able to share the reciprocal,
7892 saving an expensive divide. If not, combine will rebuild the original
7893 arithmetic. */
7894 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7895 && TREE_CODE (type) == REAL_TYPE
7896 && !real_onep (TREE_OPERAND (exp, 0)))
7897 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7898 build (RDIV_EXPR, type,
7899 build_real (type, dconst1),
7900 TREE_OPERAND (exp, 1))),
7901 target, tmode, modifier);
7902 this_optab = sdiv_optab;
7903 goto binop;
7905 case TRUNC_MOD_EXPR:
7906 case FLOOR_MOD_EXPR:
7907 case CEIL_MOD_EXPR:
7908 case ROUND_MOD_EXPR:
7909 if (modifier == EXPAND_STACK_PARM)
7910 target = 0;
7911 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7912 subtarget, &op0, &op1, 0);
7913 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7915 case FIX_ROUND_EXPR:
7916 case FIX_FLOOR_EXPR:
7917 case FIX_CEIL_EXPR:
7918 abort (); /* Not used for C. */
7920 case FIX_TRUNC_EXPR:
7921 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7922 if (target == 0 || modifier == EXPAND_STACK_PARM)
7923 target = gen_reg_rtx (mode);
7924 expand_fix (target, op0, unsignedp);
7925 return target;
7927 case FLOAT_EXPR:
7928 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7929 if (target == 0 || modifier == EXPAND_STACK_PARM)
7930 target = gen_reg_rtx (mode);
7931 /* expand_float can't figure out what to do if FROM has VOIDmode.
7932 So give it the correct mode. With -O, cse will optimize this. */
7933 if (GET_MODE (op0) == VOIDmode)
7934 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7936 expand_float (target, op0,
7937 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7938 return target;
7940 case NEGATE_EXPR:
7941 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7942 if (modifier == EXPAND_STACK_PARM)
7944 temp = expand_unop (mode,
7945 ! unsignedp && flag_trapv
7946 && (GET_MODE_CLASS(mode) == MODE_INT)
7947 ? negv_optab : neg_optab, op0, target, 0);
7953 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7954 if (modifier == EXPAND_STACK_PARM)
7957 /* ABS_EXPR is not valid for complex arguments. */
7958 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7959 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7962 /* Unsigned abs is simply the operand. Testing here means we don't
7963 risk generating incorrect code below. */
7964 if (TYPE_UNSIGNED (type))
7967 return expand_abs (mode, op0, target, unsignedp,
7968 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7972 target = original_target;
7974 || modifier == EXPAND_STACK_PARM
7975 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7976 || GET_MODE (target) != mode
7977 || (GET_CODE (target) == REG
7978 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7979 target = gen_reg_rtx (mode);
7980 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7981 target, &op0, &op1, 0);
7983 /* First try to do it with a special MIN or MAX instruction.
7984 If that does not win, use a conditional jump to select the proper
7986 this_optab = (unsignedp
7987 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7988 : (code == MIN_EXPR ? smin_optab : smax_optab));
7990 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7995 /* At this point, a MEM target is no longer useful; we will get better
7998 if (GET_CODE (target) == MEM)
7999 target = gen_reg_rtx (mode);
8001 /* If op1 was placed in target, swap op0 and op1. */
8002 if (target != op0 && target == op1)
8010 emit_move_insn (target, op0);
8012 op0 = gen_label_rtx ();
8014 /* If this mode is an integer too wide to compare properly,
8015 compare word by word. Rely on cse to optimize constant cases. */
8016 if (GET_MODE_CLASS (mode) == MODE_INT
8017 && ! can_compare_p (GE, mode, ccp_jump))
8019 if (code == MAX_EXPR)
8020 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8023 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8028 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8029 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8031 emit_move_insn (target, op1);
8036 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8037 if (modifier == EXPAND_STACK_PARM)
8039 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8044 /* ??? Can optimize bitwise operations with one arg constant.
8045 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8046 and (a bitwise1 b) bitwise2 b (etc)
8047 but that is probably not worth while. */
8049 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8050 boolean values when we want in all cases to compute both of them. In
8051 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8052 as actual zero-or-1 values and then bitwise anding. In cases where
8053 there cannot be any side effects, better code would be made by
8054 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8055 how to recognize those cases. */
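
	  /* An illustrative sketch, not compiler code: for zero-or-one
	     operands the two forms below compute the same value, but
	     TRUTH_AND_EXPR evaluates both sides while TRUTH_ANDIF_EXPR
	     short-circuits.  The names are hypothetical.  */
#if 0
	  int truth_and (int a, int b)
	  {
	    return (a != 0) & (b != 0);	/* TRUTH_AND_EXPR: no branch needed.  */
	  }
	  int truth_andif (int a, int b)
	  {
	    return a != 0 && b != 0;	/* TRUTH_ANDIF_EXPR: b is not
					   evaluated when a is zero.  */
	  }
#endif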
	case TRUTH_AND_EXPR:
	case BIT_AND_EXPR:
	  this_optab = and_optab;
	  goto binop;

	case TRUTH_OR_EXPR:
	case BIT_IOR_EXPR:
	  this_optab = ior_optab;
	  goto binop;

	case TRUTH_XOR_EXPR:
	case BIT_XOR_EXPR:
	  this_optab = xor_optab;
	  goto binop;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	case LROTATE_EXPR:
	case RROTATE_EXPR:
	  if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	    subtarget = 0;
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			       unsignedp);

	  /* Could determine the answer when only additive constants differ.  Also,
	     the addition of one can be handled by changing the condition.  */
	case LT_EXPR:
	case LE_EXPR:
	case GT_EXPR:
	case GE_EXPR:
	case EQ_EXPR:
	case NE_EXPR:
	case UNORDERED_EXPR:
	case ORDERED_EXPR:
	case UNLT_EXPR:
	case UNLE_EXPR:
	case UNGT_EXPR:
	case UNGE_EXPR:
	case UNEQ_EXPR:
	  temp = do_store_flag (exp,
				modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
				tmode != VOIDmode ? tmode : mode, 0);
	  if (temp != 0)
	    return temp;

	  /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
	  if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	      && original_target
	      && GET_CODE (original_target) == REG
	      && (GET_MODE (original_target)
		  == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	    {
	      temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
				  VOIDmode, 0);

	      /* If temp is constant, we can just compute the result.  */
	      if (GET_CODE (temp) == CONST_INT)
		{
		  if (INTVAL (temp) != 0)
		    emit_move_insn (target, const1_rtx);
		  else
		    emit_move_insn (target, const0_rtx);

		  return target;
		}

	      if (temp != original_target)
		{
		  enum machine_mode mode1 = GET_MODE (temp);
		  if (mode1 == VOIDmode)
		    mode1 = tmode != VOIDmode ? tmode : mode;

		  temp = copy_to_mode_reg (mode1, temp);
		}

	      op1 = gen_label_rtx ();
	      emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				       GET_MODE (temp), unsignedp, op1);
	      emit_move_insn (temp, const1_rtx);
	      emit_label (op1);
	      return temp;
	    }

	  /* If no set-flag instruction, must generate a conditional
	     store into a temporary variable.  Drop through
	     and handle this like && and ||.  */

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  if (! ignore
	      && (target == 0
		  || modifier == EXPAND_STACK_PARM
		  || ! safe_from_p (target, exp, 1)
		  /* Make sure we don't have a hard reg (such as function's return
		     value) live across basic blocks, if not optimizing.  */
		  || (!optimize && GET_CODE (target) == REG
		      && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	    target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	  if (target)
	    emit_clr_insn (target);

	  op1 = gen_label_rtx ();
	  jumpifnot (exp, op1);

	  if (target)
	    emit_0_to_1_insn (target);

	  emit_label (op1);
	  return ignore ? const0_rtx : target;

	case TRUTH_NOT_EXPR:
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
	  /* The parser is careful to generate TRUTH_NOT_EXPR
	     only with operands that are always zero or one.  */
	  temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			       target, 1, OPTAB_LIB_WIDEN);
	  if (temp == 0)
	    abort ();
	  return temp;

	case COMPOUND_EXPR:
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
	  emit_queue ();
	  return expand_expr_real (TREE_OPERAND (exp, 1),
				   (ignore ? const0_rtx : target),
				   VOIDmode, modifier, alt_rtl);

	case COND_EXPR:
	  /* If we would have a "singleton" (see below) were it not for a
	     conversion in each arm, bring that conversion back out.  */
	  if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	      && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	    {
	      tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	      tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	      if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
		   && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		      && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		      && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
		  || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		      && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
		return expand_expr (build1 (NOP_EXPR, type,
					    build (COND_EXPR, TREE_TYPE (iftrue),
						   TREE_OPERAND (exp, 0),
						   iftrue, iffalse)),
				    target, tmode, modifier);
	    }

	  {
	    /* Note that COND_EXPRs whose type is a structure or union
	       are required to be constructed to contain assignments of
	       a temporary variable, so that we can evaluate them here
	       for side effect only.  If type is void, we must do likewise.  */

	    /* If an arm of the branch requires a cleanup,
	       only that cleanup is performed.  */

	    tree singleton = 0;
	    tree binary_op = 0, unary_op = 0;

	    /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	       convert it to our mode, if necessary.  */
	    if (integer_onep (TREE_OPERAND (exp, 1))
		&& integer_zerop (TREE_OPERAND (exp, 2))
		&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		if (ignore)
		  {
		    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
				 modifier);
		    return const0_rtx;
		  }

		if (modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
		if (GET_MODE (op0) == mode)
		  return op0;

		if (target == 0)
		  target = gen_reg_rtx (mode);
		convert_move (target, op0, unsignedp);
		return target;
	      }

	    /* Check for X ? A + B : A.  If we have this, we can copy A to the
	       output and conditionally add B.  Similarly for unary operations.
	       Don't do this if X has side-effects because those side effects
	       might affect A or B and the "?" operation is a sequence point in
	       ANSI.  (operand_equal_p tests for side effects.)  */
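
	    /* An illustrative sketch, not compiler code: the "singleton"
	       shape looked for below.  In the first function A can be
	       copied to the output and B conditionally added; in the second
	       the side effect in the condition makes the rewrite invalid,
	       since the ?: is a sequence point.  Names are hypothetical.  */
#if 0
	    int singleton_ok (int x, int a, int b)
	    {
	      return x ? a + b : a;	/* Candidate: copy a, conditionally
					   add b.  */
	    }
	    int singleton_bad (int *p, int a, int b)
	    {
	      return (*p)++ ? a + b : a; /* Not a candidate: the condition
					    has a side effect.  */
	    }
#endif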
	    if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
		&& operand_equal_p (TREE_OPERAND (exp, 2),
				    TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	      singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		     && operand_equal_p (TREE_OPERAND (exp, 1),
					 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	      singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		     && operand_equal_p (TREE_OPERAND (exp, 2),
					 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	      singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	    else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		     && operand_equal_p (TREE_OPERAND (exp, 1),
					 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	      singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	    /* If we are not to produce a result, we have no target.  Otherwise,
	       if a target was specified use it; it will not be used as an
	       intermediate target unless it is safe.  If no target, use a
	       temporary.  */
	    if (ignore)
	      temp = 0;
	    else if (modifier == EXPAND_STACK_PARM)
	      temp = assign_temp (type, 0, 0, 1);
	    else if (original_target
		     && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
			 || (singleton && GET_CODE (original_target) == REG
			     && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			     && original_target == var_rtx (singleton)))
		     && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		     && (! can_conditionally_move_p (mode)
			 || GET_CODE (original_target) == REG
			 || TREE_ADDRESSABLE (type))
#endif
		     && (GET_CODE (original_target) != MEM
			 || TREE_ADDRESSABLE (type)))
	      temp = original_target;
	    else if (TREE_ADDRESSABLE (type))
	      abort ();
	    else
	      temp = assign_temp (type, 0, 0, 1);

	    /* If we had X ? A + C : A, with C a constant power of 2, and we can
	       do the test of X as a store-flag operation, do this as
	       A + ((X != 0) << log C).  Similarly for other simple binary
	       operators.  Only do for C == 1 if BRANCH_COST is low.  */
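
	    /* An illustrative sketch, not compiler code: with C a power of 2
	       the conditional can be computed without a branch, using a
	       store-flag result shifted into place.  For x ? a + 4 : a this
	       is a + ((x != 0) << 2), since log2 (4) == 2.  */
#if 0
	    int cond_add4 (int x, int a)
	    {
	      return a + ((x != 0) << 2);	/* Same value as x ? a + 4 : a.  */
	    }
#endif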
	    if (temp && singleton && binary_op
		&& (TREE_CODE (binary_op) == PLUS_EXPR
		    || TREE_CODE (binary_op) == MINUS_EXPR
		    || TREE_CODE (binary_op) == BIT_IOR_EXPR
		    || TREE_CODE (binary_op) == BIT_XOR_EXPR)
		&& (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		    : integer_onep (TREE_OPERAND (binary_op, 1)))
		&& TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	      {
		rtx result;
		tree cond;
		optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
				? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
				   ? addv_optab : add_optab)
				: TREE_CODE (binary_op) == MINUS_EXPR
				? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
				   ? subv_optab : sub_optab)
				: TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
				: xor_optab);

		/* If we had X ? A : A + 1, do this as A + (X == 0).  */
		if (singleton == TREE_OPERAND (exp, 1))
		  cond = invert_truthvalue (TREE_OPERAND (exp, 0));
		else
		  cond = TREE_OPERAND (exp, 0);

		result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
					       ? temp : NULL_RTX),
					mode, BRANCH_COST <= 1);

		if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
		  result = expand_shift (LSHIFT_EXPR, mode, result,
					 build_int_2 (tree_log2
						      (TREE_OPERAND
						       (binary_op, 1)),
						      0),
					 (safe_from_p (temp, singleton, 1)
					  ? temp : NULL_RTX), 0);

		if (result)
		  {
		    op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		    return expand_binop (mode, boptab, op1, result, temp,
					 unsignedp, OPTAB_LIB_WIDEN);
		  }
	      }

	    do_pending_stack_adjust ();
	    NO_DEFER_POP;
	    op0 = gen_label_rtx ();

	    if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	      {
		if (temp != 0)
		  {
		    /* If the target conflicts with the other operand of the
		       binary op, we can't use it.  Also, we can't use the target
		       if it is a hard register, because evaluating the condition
		       might clobber it.  */
		    if ((binary_op
			 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
			|| (GET_CODE (temp) == REG
			    && REGNO (temp) < FIRST_PSEUDO_REGISTER))
		      temp = gen_reg_rtx (mode);
		    store_expr (singleton, temp,
				modifier == EXPAND_STACK_PARM ? 2 : 0);
		  }
		else
		  expand_expr (singleton,
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		if (singleton == TREE_OPERAND (exp, 1))
		  jumpif (TREE_OPERAND (exp, 0), op0);
		else
		  jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		if (binary_op && temp == 0)
		  /* Just touch the other operand.  */
		  expand_expr (TREE_OPERAND (binary_op, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		else if (binary_op)
		  store_expr (build (TREE_CODE (binary_op), type,
				     make_tree (type, temp),
				     TREE_OPERAND (binary_op, 1)),
			      temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
		else
		  store_expr (build1 (TREE_CODE (unary_op), type,
				      make_tree (type, temp)),
			      temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
		op1 = op0;
	      }
	    /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	       comparison operator.  If we have one of these cases, set the
	       output to A, branch on A (cse will merge these two references),
	       then set the output to FOO.  */
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
					 TREE_OPERAND (exp, 1), 0)
		     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
			 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		     && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	      {
		if (GET_CODE (temp) == REG
		    && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 1), temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
		jumpif (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
		  store_expr (TREE_OPERAND (exp, 2), temp,
			      modifier == EXPAND_STACK_PARM ? 2 : 0);
		else
		  expand_expr (TREE_OPERAND (exp, 2),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		op1 = op0;
	      }
	    else if (temp
		     && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		     && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		     && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
					 TREE_OPERAND (exp, 2), 0)
		     && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
			 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		     && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	      {
		if (GET_CODE (temp) == REG
		    && REGNO (temp) < FIRST_PSEUDO_REGISTER)
		  temp = gen_reg_rtx (mode);
		store_expr (TREE_OPERAND (exp, 2), temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();
		if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
		  store_expr (TREE_OPERAND (exp, 1), temp,
			      modifier == EXPAND_STACK_PARM ? 2 : 0);
		else
		  expand_expr (TREE_OPERAND (exp, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		op1 = op0;
	      }
	    else
	      {
		op1 = gen_label_rtx ();
		jumpifnot (TREE_OPERAND (exp, 0), op0);

		start_cleanup_deferral ();

		/* One branch of the cond can be void, if it never returns.  For
		   example A ? throw : E.  */
		if (temp != 0
		    && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
		  store_expr (TREE_OPERAND (exp, 1), temp,
			      modifier == EXPAND_STACK_PARM ? 2 : 0);
		else
		  expand_expr (TREE_OPERAND (exp, 1),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
		end_cleanup_deferral ();
		emit_queue ();
		emit_jump_insn (gen_jump (op1));
		emit_barrier ();
		emit_label (op0);
		start_cleanup_deferral ();
		if (temp != 0
		    && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
		  store_expr (TREE_OPERAND (exp, 2), temp,
			      modifier == EXPAND_STACK_PARM ? 2 : 0);
		else
		  expand_expr (TREE_OPERAND (exp, 2),
			       ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	      }

	    end_cleanup_deferral ();

	    emit_queue ();
	    emit_label (op1);
	    OK_DEFER_POP;

	    return temp;
	  }
	case TARGET_EXPR:
	  {
	    /* Something needs to be initialized, but we didn't know
	       where that thing was when building the tree.  For example,
	       it could be the return value of a function, or a parameter
	       to a function which lays down in the stack, or a temporary
	       variable which must be passed by reference.

	       We guarantee that the expression will either be constructed
	       or copied into our original target.  */

	    tree slot = TREE_OPERAND (exp, 0);
	    tree cleanups = NULL_TREE;
	    tree exp1;

	    if (TREE_CODE (slot) != VAR_DECL)
	      abort ();

	    if (! ignore)
	      target = original_target;

	    /* Set this here so that if we get a target that refers to a
	       register variable that's already been used, put_reg_into_stack
	       knows that it should fix up those uses.  */
	    TREE_USED (slot) = 1;

	    if (target == 0)
	      {
		if (DECL_RTL_SET_P (slot))
		  {
		    target = DECL_RTL (slot);
		    /* We have already expanded the slot, so don't do
		       anything else now.  */
		    if (TREE_OPERAND (exp, 1) == NULL_TREE)
		      return target;
		  }
		else
		  {
		    target = assign_temp (type, 2, 0, 1);
		    SET_DECL_RTL (slot, target);
		    if (TREE_ADDRESSABLE (slot))
		      put_var_into_stack (slot, /*rescan=*/false);

		    /* Since SLOT is not known to the called function
		       to belong to its stack frame, we must build an explicit
		       cleanup.  This case occurs when we must build up a reference
		       to pass the reference as an argument.  In this case,
		       it is very likely that such a reference need not be
		       built here.  */
		    if (TREE_OPERAND (exp, 2) == 0)
		      TREE_OPERAND (exp, 2)
			= lang_hooks.maybe_build_cleanup (slot);
		    cleanups = TREE_OPERAND (exp, 2);
		  }
	      }
	    else
	      {
		/* This case does occur when expanding a parameter which
		   needs to be constructed on the stack.  The target
		   is the actual stack address that we want to initialize.
		   The function we call will perform the cleanup in this case.  */

		/* If we have already assigned it space, use that space,
		   not target that we were passed in, as our target
		   parameter is only a hint.  */
		if (DECL_RTL_SET_P (slot))
		  {
		    target = DECL_RTL (slot);
		    /* We have already expanded the slot, so don't do
		       anything else now.  */
		    if (TREE_OPERAND (exp, 1) == NULL_TREE)
		      return target;
		  }
		else
		  {
		    SET_DECL_RTL (slot, target);
		    /* If we must have an addressable slot, then make sure that
		       the RTL that we just stored in slot is OK.  */
		    if (TREE_ADDRESSABLE (slot))
		      put_var_into_stack (slot, /*rescan=*/true);
		  }
	      }

	    exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	    /* Mark it as expanded.  */
	    TREE_OPERAND (exp, 1) = NULL_TREE;

	    store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

	    expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	    return target;
	  }
	case INIT_EXPR:
	  {
	    tree lhs = TREE_OPERAND (exp, 0);
	    tree rhs = TREE_OPERAND (exp, 1);

	    temp = expand_assignment (lhs, rhs, ! ignore);
	    return temp;
	  }

	case MODIFY_EXPR:
	  {
	    /* If lhs is complex, expand calls in rhs before computing it.
	       That's so we don't compute a pointer and save it over a
	       call.  If lhs is simple, compute it first so we can give it
	       as a target if the rhs is just a call.  This avoids an
	       extra temp and copy and that prevents a partial-subsumption
	       which makes bad code.  Actually we could treat
	       component_ref's of vars like vars.  */

	    tree lhs = TREE_OPERAND (exp, 0);
	    tree rhs = TREE_OPERAND (exp, 1);

	    /* Check for |= or &= of a bitfield of size 1 into another bitfield
	       of size 1.  In this case, (unless we need the result of the
	       assignment) we can do this more efficiently with a
	       test followed by an assignment, if necessary.

	       ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	       things change so we do, this code should be enhanced to
	       handle it.  */
	    if (ignore
		&& TREE_CODE (lhs) == COMPONENT_REF
		&& (TREE_CODE (rhs) == BIT_IOR_EXPR
		    || TREE_CODE (rhs) == BIT_AND_EXPR)
		&& TREE_OPERAND (rhs, 0) == lhs
		&& TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
		&& integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
		&& integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	      {
		rtx label = gen_label_rtx ();

		do_jump (TREE_OPERAND (rhs, 1),
			 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
			 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
		expand_assignment (lhs, convert (TREE_TYPE (rhs),
						 (TREE_CODE (rhs) == BIT_IOR_EXPR
						  ? integer_one_node
						  : integer_zero_node)),
				   0);
		do_pending_stack_adjust ();
		emit_label (label);
		return const0_rtx;
	      }

	    temp = expand_assignment (lhs, rhs, ! ignore);

	    return temp;
	  }

	case RETURN_EXPR:
	  if (!TREE_OPERAND (exp, 0))
	    expand_null_return ();
	  else
	    expand_return (TREE_OPERAND (exp, 0));
	  return const0_rtx;

	case PREINCREMENT_EXPR:
	case PREDECREMENT_EXPR:
	  return expand_increment (exp, 0, ignore);

	case POSTINCREMENT_EXPR:
	case POSTDECREMENT_EXPR:
	  /* Faster to treat as pre-increment if result is not used.  */
	  return expand_increment (exp, ! ignore, ignore);
	case ADDR_EXPR:
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  /* Are we taking the address of a nested function?  */
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	      && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	      && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	      && ! TREE_STATIC (exp))
	    {
	      op0 = trampoline_address (TREE_OPERAND (exp, 0));
	      op0 = force_operand (op0, target);
	    }
	  /* If we are taking the address of something erroneous, just
	     use zero.  */
	  else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	    return const0_rtx;
	  /* If we are taking the address of a constant and are at the
	     top level, we have to use output_constant_def since we can't
	     call force_const_mem at top level.  */
	  else if (cfun == 0
		   && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		       || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
			   == 'c')))
	    op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
	  else
	    {
	      /* We make sure to pass const0_rtx down if we came in with
		 ignore set, to avoid doing the cleanups twice for something.  */
	      op0 = expand_expr (TREE_OPERAND (exp, 0),
				 ignore ? const0_rtx : NULL_RTX, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? modifier : EXPAND_CONST_ADDRESS));

	      /* If we are going to ignore the result, OP0 will have been set
		 to const0_rtx, so just return it.  Don't get confused and
		 think we are taking the address of the constant.  */
	      if (ignore)
		return op0;

	      /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
		 clever and returns a REG when given a MEM.  */
	      op0 = protect_from_queue (op0, 1);

	      /* We would like the object in memory.  If it is a constant, we can
		 have it be statically allocated into memory.  For a non-constant,
		 we need to allocate some memory and store the value into it.  */
	      if (CONSTANT_P (op0))
		op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				       op0);
	      else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		       || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		       || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
		{
		  /* If the operand is a SAVE_EXPR, we can deal with this by
		     forcing the SAVE_EXPR into memory.  */
		  if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		    {
		      put_var_into_stack (TREE_OPERAND (exp, 0),
					  /*rescan=*/true);
		      op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		    }
		  else
		    {
		      /* If this object is in a register, it can't be BLKmode.  */
		      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		      rtx memloc = assign_temp (inner_type, 1, 1, 1);

		      if (GET_CODE (op0) == PARALLEL)
			/* Handle calls that pass values in multiple
			   non-contiguous locations.  The Irix 6 ABI has examples
			   of this.  */
			emit_group_store (memloc, op0, inner_type,
					  int_size_in_bytes (inner_type));
		      else
			emit_move_insn (memloc, op0);

		      op0 = memloc;
		    }
		}

	      if (GET_CODE (op0) != MEM)
		abort ();

	      mark_temp_addr_taken (op0);
	      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
		{
		  op0 = XEXP (op0, 0);
		  if (GET_MODE (op0) == Pmode && mode == ptr_mode)
		    op0 = convert_memory_address (ptr_mode, op0);
		  return op0;
		}

	      /* If OP0 is not aligned at least as much as the type requires, we
		 need to make a temporary, copy OP0 to it, and take the address of
		 the temporary.  We want to use the alignment of the type, not of
		 the operand.  Note that this is incorrect for FUNCTION_TYPE, but
		 the test for BLKmode means that can't happen.  The test for
		 BLKmode is because we never make mis-aligned MEMs with
		 non-BLKmode.

		 We don't need to do this at all if the machine doesn't have
		 strict alignment.  */
	      if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
		  && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		      > MEM_ALIGN (op0))
		  && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
		{
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx new;

		  if (TYPE_ALIGN_OK (inner_type))
		    abort ();

		  if (TREE_ADDRESSABLE (inner_type))
		    {
		      /* We can't make a bitwise copy of this object, so fail.  */
		      error ("cannot take the address of an unaligned member");
		      return const0_rtx;
		    }

		  new = assign_stack_temp_for_type
		    (TYPE_MODE (inner_type),
		     MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		     : int_size_in_bytes (inner_type),
		     1, build_qualified_type (inner_type,
					      (TYPE_QUALS (inner_type)
					       | TYPE_QUAL_CONST)));

		  emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
				   (modifier == EXPAND_STACK_PARM
				    ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		  op0 = new;
		}

	      op0 = force_operand (XEXP (op0, 0), target);
	    }

	  if (flag_force_addr
	      && GET_CODE (op0) != REG
	      && modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    op0 = force_reg (Pmode, op0);

	  if (GET_CODE (op0) == REG
	      && ! REG_USERVAR_P (op0))
	    mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

	  if (GET_MODE (op0) == Pmode && mode == ptr_mode)
	    op0 = convert_memory_address (ptr_mode, op0);

	  return op0;
	case ENTRY_VALUE_EXPR:
	  abort ();

	/* COMPLEX type for Extended Pascal & Fortran  */
	case COMPLEX_EXPR:
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    rtx insns;

	    /* Get the rtx code of the operands.  */
	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	    op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);

	    if (! target)
	      target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));

	    start_sequence ();

	    /* Move the real (op0) and imaginary (op1) parts to their location.  */
	    emit_move_insn (gen_realpart (mode, target), op0);
	    emit_move_insn (gen_imagpart (mode, target), op1);

	    insns = get_insns ();
	    end_sequence ();

	    /* Complex construction should appear as a single unit.  */
	    /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
	       each with a separate pseudo as destination.
	       It's not correct for flow to treat them as a unit.  */
	    if (GET_CODE (target) != CONCAT)
	      emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
	    else
	      emit_insn (insns);

	    return target;
	  }

	case REALPART_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	  return gen_realpart (mode, op0);

	case IMAGPART_EXPR:
	  op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	  return gen_imagpart (mode, op0);

	case CONJ_EXPR:
	  {
	    enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	    rtx imag_t;
	    rtx insns;

	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);

	    if (! target)
	      target = gen_reg_rtx (mode);

	    start_sequence ();

	    /* Store the realpart and the negated imagpart to target.  */
	    emit_move_insn (gen_realpart (partmode, target),
			    gen_realpart (partmode, op0));

	    imag_t = gen_imagpart (partmode, target);
	    temp = expand_unop (partmode,
				! unsignedp && flag_trapv
				&& (GET_MODE_CLASS (partmode) == MODE_INT)
				? negv_optab : neg_optab,
				gen_imagpart (partmode, op0), imag_t, 0);
	    if (temp != imag_t)
	      emit_move_insn (imag_t, temp);

	    insns = get_insns ();
	    end_sequence ();

	    /* Conjugate should appear as a single unit.
	       If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
	       each with a separate pseudo as destination.
	       It's not correct for flow to treat them as a unit.  */
	    if (GET_CODE (target) != CONCAT)
	      emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
	    else
	      emit_insn (insns);

	    return target;
	  }

	case TRY_CATCH_EXPR:
	  {
	    tree handler = TREE_OPERAND (exp, 1);

	    expand_eh_region_start ();
	    op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
	    expand_eh_region_end_cleanup (handler);

	    return op0;
	  }

	case TRY_FINALLY_EXPR:
	  {
	    tree try_block = TREE_OPERAND (exp, 0);
	    tree finally_block = TREE_OPERAND (exp, 1);

	    if (!optimize || unsafe_for_reeval (finally_block) > 1)
	      {
		/* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
		   is not sufficient, so we cannot expand the block twice.
		   So we play games with GOTO_SUBROUTINE_EXPR to let us
		   expand the thing only once.  */
		/* When not optimizing, we go ahead with this form since
		   (1) user breakpoints operate more predictably without
		       code duplication, and
		   (2) we're not running any of the global optimizers
		       that would explode in time/space with the highly
		       connected CFG created by the indirect branching.  */
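
		/* An illustrative sketch, not compiler code: the control
		   flow built below, expressed with a GNU C computed goto.
		   The finally block is emitted once and return_link
		   remembers where to resume.  */
#if 0
		void try_finally_shape (void)
		{
		  void *return_link;

		  /* ... try block ...  */
		  return_link = &&done;
		  goto finally;

		 finally:
		  /* ... finally block, emitted only once ...  */
		  goto *return_link;

		 done:
		  ;
		}
#endif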
		rtx finally_label = gen_label_rtx ();
		rtx done_label = gen_label_rtx ();
		rtx return_link = gen_reg_rtx (Pmode);
		tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
				      (tree) finally_label, (tree) return_link);
		TREE_SIDE_EFFECTS (cleanup) = 1;

		/* Start a new binding layer that will keep track of all cleanup
		   actions to be performed.  */
		expand_start_bindings (2);
		target_temp_slot_level = temp_slot_level;

		expand_decl_cleanup (NULL_TREE, cleanup);
		op0 = expand_expr (try_block, target, tmode, modifier);

		preserve_temp_slots (op0);
		expand_end_bindings (NULL_TREE, 0, 0);
		emit_jump (done_label);
		emit_label (finally_label);
		expand_expr (finally_block, const0_rtx, VOIDmode, 0);
		emit_indirect_jump (return_link);
		emit_label (done_label);
	      }
	    else
	      {
		expand_start_bindings (2);
		target_temp_slot_level = temp_slot_level;

		expand_decl_cleanup (NULL_TREE, finally_block);
		op0 = expand_expr (try_block, target, tmode, modifier);

		preserve_temp_slots (op0);
		expand_end_bindings (NULL_TREE, 0, 0);
	      }

	    return op0;
	  }

	case GOTO_SUBROUTINE_EXPR:
	  {
	    rtx subr = (rtx) TREE_OPERAND (exp, 0);
	    rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
	    rtx return_address = gen_label_rtx ();
	    emit_move_insn (return_link,
			    gen_rtx_LABEL_REF (Pmode, return_address));
	    emit_jump (subr);
	    emit_label (return_address);
	    return const0_rtx;
	  }

	case VA_ARG_EXPR:
	  return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);

	case EXC_PTR_EXPR:
	  return get_exception_pointer (cfun);

	case FDESC_EXPR:
	  /* Function descriptors are not valid except for as
	     initialization constants, and should not be expanded.  */
	  abort ();

	default:
	  /* ??? Use (*fun) form because expand_expr is a macro.  */
	  return (*lang_hooks.expand_expr) (exp, original_target, tmode,
					    modifier, alt_rtl);
	}

      /* Here to do an ordinary binary operator, generating an instruction
	 from the optab already placed in `this_optab'.  */
    binop:
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
    binop2:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_binop (mode, this_optab, op0, op1, target,
			   unsignedp, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;
}

/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
   when applied to the address of EXP produces an address known to be
   aligned more than BIGGEST_ALIGNMENT.  */

static int
is_aligning_offset (tree offset, tree exp)
{
  /* Strip off any conversions.  */
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* We must now have a BIT_AND_EXPR with a constant that is one less than
     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
  if (TREE_CODE (offset) != BIT_AND_EXPR
      || !host_integerp (TREE_OPERAND (offset, 1), 1)
      || compare_tree_int (TREE_OPERAND (offset, 1),
			   BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
    return 0;

  /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
     It must be NEGATE_EXPR.  Then strip any more conversions.  */
  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  if (TREE_CODE (offset) != NEGATE_EXPR)
    return 0;

  offset = TREE_OPERAND (offset, 0);
  while (TREE_CODE (offset) == NON_LVALUE_EXPR
	 || TREE_CODE (offset) == NOP_EXPR
	 || TREE_CODE (offset) == CONVERT_EXPR)
    offset = TREE_OPERAND (offset, 0);

  /* This must now be the address of EXP.  */
  return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
}
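
/* An illustrative sketch, not compiler code: the source-level shape that
   is_aligning_offset recognizes.  Adding (-addr) & (align - 1) to addr
   rounds it up to a multiple of align, so the resulting access is known
   to be aligned.  BIG_ALIGN is a hypothetical constant larger than
   BIGGEST_ALIGNMENT / BITS_PER_UNIT.  */
#if 0
#include <stdint.h>
#define BIG_ALIGN 64
char *
align_up (char *p)
{
  return p + ((-(uintptr_t) p) & (BIG_ALIGN - 1));
}
#endif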
/* Return the tree node if an ARG corresponds to a string constant or zero
   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
   in bytes within the string that ARG is accessing.  The type of the
   offset will be `sizetype'.  */

tree
string_constant (tree arg, tree *ptr_offset)
{
  STRIP_NOPS (arg);

  if (TREE_CODE (arg) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
    {
      *ptr_offset = size_zero_node;
      return TREE_OPERAND (arg, 0);
    }
  else if (TREE_CODE (arg) == PLUS_EXPR)
    {
      tree arg0 = TREE_OPERAND (arg, 0);
      tree arg1 = TREE_OPERAND (arg, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == ADDR_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg1);
	  return TREE_OPERAND (arg0, 0);
	}
      else if (TREE_CODE (arg1) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
	{
	  *ptr_offset = convert (sizetype, arg0);
	  return TREE_OPERAND (arg1, 0);
	}
    }

  return 0;
}
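
/* An illustrative sketch, not compiler code: for an argument tree
   corresponding to "foobar" + 2, string_constant returns the STRING_CST
   for "foobar" and sets *ptr_offset to 2, which lets a builtin such as
   strlen of that address be folded to 4.  */
#if 0
#include <string.h>
size_t
folded_len (void)
{
  return strlen ("foobar" + 2);	/* Foldable to 6 - 2 == 4.  */
}
#endif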
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (tree exp, int post, int ignore)
{
  rtx op0, op1;
  rtx temp, value;
  tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	op0 = copy_to_reg (op0);
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (-INTVAL (op1));
      this_optab = add_optab;
    }

  if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
    this_optab = this_optab == add_optab ? addv_optab : subv_optab;

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_data[icode].operand[0].predicate) (op0, mode)
	  && (*insn_data[icode].operand[1].predicate) (op0, mode)
	  && (*insn_data[icode].operand[2].predicate) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore);
      return post ? op0 : temp;
    }
  /* We have a true reference to the value in OP0.
     If there is an insn to add or subtract in this mode, queue it.
     Queuing the increment insn avoids the register shuffling
     that often results if we must increment now and first save
     the old value for subsequent use.  */

#if 0				/* Turned off to avoid making extra insn for indexed memref.  */
  op0 = stabilize (op0);
#endif

  icode = (int) this_optab->handlers[(int) mode].insn_code;
  if (icode != (int) CODE_FOR_nothing
      /* Make sure that OP0 is valid for operands 0 and 1
	 of the insn we want to queue.  */
      && (*insn_data[icode].operand[0].predicate) (op0, mode)
      && (*insn_data[icode].operand[1].predicate) (op0, mode))
    {
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	op1 = force_reg (mode, op1);

      return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
    }
  if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
    {
      rtx addr = (general_operand (XEXP (op0, 0), mode)
		  ? force_reg (Pmode, XEXP (op0, 0))
		  : copy_to_reg (XEXP (op0, 0)));
      rtx temp, result;

      op0 = replace_equiv_address (op0, addr);
      temp = force_reg (GET_MODE (op0), op0);
      if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
	op1 = force_reg (mode, op1);

      /* The increment queue is LIFO, thus we have to `queue'
	 the instructions in reverse order.  */
      enqueue_insn (op0, gen_move_insn (op0, temp));
      result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
      return result;
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);

  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
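
/* An illustrative sketch, not compiler code: the set/jump/set fallback
   mentioned above, written as source-level control flow.  The store-flag
   target is set to 1, and a conditional jump skips the store of 0 when
   the comparison holds.  */
#if 0
int
set_jump_set (int a, int b)
{
  int target = 1;
  if (a < b)
    goto done;
  target = 0;
 done:
  return target;
}
#endif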
static rtx
do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.

     The code to make this transformation was moved into fold_single_bit_test,
     so we just call into the folder and expand its result.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
      return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
						arg0, arg1, type),
			  target, VOIDmode, EXPAND_NORMAL);
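
      /* An illustrative sketch, not compiler code: the single-bit rewrite
	 performed by fold_single_bit_test.  Testing bit 3 needs no scc
	 insn.  */
#if 0
      int bit3_ne (int x)
      {
	return (x & 8) != 0;	/* becomes (x >> 3) & 1 */
      }
      int bit3_eq (int x)
      {
	return (x & 8) == 0;	/* becomes ((x >> 3) & 1) ^ 1 */
      }
#endif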
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode)
    subtarget = 0;

  expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold (void)
{
  return CASE_VALUES_THRESHOLD;
}

/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
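
  /* An illustrative sketch, not compiler code: the same trick at the
     source level.  After subtracting the low bound, one unsigned
     comparison rejects both i < lo and i > hi, e.g. for a switch with
     cases 5..12.  */
#if 0
  int in_range (int i)
  {
    return (unsigned) (i - 5) <= (unsigned) (12 - 5);
  }
#endif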
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}

/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */
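
/* An illustrative sketch, not compiler code: what "emulate with narrower
   modes" means.  A V2DI addition can be carried out as two DImode
   additions when there is no V2DI hardware support.  */
#if 0
void
add_v2di (long long d[2], const long long a[2], const long long b[2])
{
  d[0] = a[0] + b[0];	/* first DImode piece */
  d[1] = a[1] + b[1];	/* second DImode piece */
}
#endif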
int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but do have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate it with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}

/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (is_zeros_p (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

#include "gt-expr.h"