/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);

static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
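/* As an illustration (hypothetical numbers, not from any particular
   target): if MOVE_RATIO is 8 and move_by_pieces_ninsns (16, 32) comes
   to 4, then MOVE_BY_PIECES_P (16, 32) is true and a 16-byte copy is
   expanded inline instead of becoming a memcpy call.  */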
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

init_expr_once (void)
{
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function (void)

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
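/* Usage sketch (illustrative, not from the original sources):

     y = protect_from_queue (x, 0);
     emit_insn (gen_move_insn (target, y));

   is safe, whereas calling emit_queue between the two statements could
   flush the queued increments and leave Y referring to a stale value.  */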
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
    {
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  rtx temp = gen_reg_rtx (GET_MODE (x));

	  emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
	 remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
    }

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */

      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))

  else if (code == PLUS || code == MULT)
    {
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

      return queued_subexp_p (XEXP (x, 0));

      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));

/* Retrieve a mark on the queue.  */

  return pending_chain;

/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

emit_insns_enqueued_after_mark (rtx mark)
{
  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))
    return;

  while ((p = pending_chain) != mark)
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{

	  QUEUED_INSN (p) = body;

#ifdef ENABLE_CHECKING

	  QUEUED_INSN (p) = emit_insn (body);

      pending_chain = QUEUED_NEXT (p);

/* Perform all the pending incrementations.  */

emit_queue (void)
{
  emit_insns_enqueued_after_mark (NULL_RTX);
}
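/* For example (an illustrative sketch, not from the original sources),
   code expanding a post-increment of the variable held in V might do

     enqueue_insn (v, gen_add2_insn (v, const1_rtx));

   and the addition is then emitted later, either by emit_queue above or
   selectively by emit_insns_enqueued_after_mark.  */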
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (reg_overlap_mentioned_p (to, from))
	    from = force_reg (from_mode, from);
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	}

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;

	  && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	  && STORE_FLAG_VALUE == -1)
	{
	  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			 lowpart_mode, 0);
	  fill_value = gen_reg_rtx (word_mode);
	  emit_insn (gen_slt (fill_value));

	  fill_value
	    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
			    NULL_RTX, 0);
	  fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
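	  /* E.g. (illustrative) extending QImode to SImode with no
	     suitable extension insn and no usable intermediate mode:
	     SHIFT_AMOUNT is 32 - 8 = 24, so the result is computed as
	     (x << 24) >> 24, the right shift being arithmetic when
	     sign-extending (unsignedp == 0) and logical otherwise.  */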
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
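/* Typical use (an illustrative sketch): to zero-extend a QImode value
   into a fresh SImode register one might write

     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, 1);

   whereas passing 0 for UNSIGNEDP requests sign-extension instead.  */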
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
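/* E.g. (illustrative) convert_to_mode (DImode, x, 0) returns X widened
   to DImode with sign-extension, allocating a new pseudo only when X
   cannot simply be referred to in the wider mode.  */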
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
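      /* Worked example (illustrative): with a 32-bit HOST_WIDE_INT,
	 converting the SImode constant -1 to an unsigned DImode value
	 must yield 0x00000000ffffffff.  gen_lowpart would sign-extend
	 the CONST_INT to all ones; immed_double_const (val, 0, mode)
	 forces the high-order word to zero instead.  */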
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))

      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
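/* E.g. (illustrative) convert_modes (SImode, QImode, x, 1) yields an
   SImode rtx equal to X zero-extended from 8 bits, using gen_lowpart,
   gen_int_mode, or an explicit convert_move as the cases above dictate.  */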
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
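/* E.g. on a host with a 64-bit HOST_WIDE_INT, immediate constants of up
   to 2 * 8 == 16 bytes can be represented, so STORE_MAX_PIECES is
   MOVE_MAX_PIECES capped at 16 bytes.  */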
/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
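/* Callers are expected to gate the inline expansion on this predicate,
   e.g. (an illustrative sketch):

     if (can_move_by_pieces (INTVAL (size), align))
       move_by_pieces (dst, src, INTVAL (size), align, 0);  */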
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);

      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }

#ifdef STACK_GROWS_DOWNWARD

  data.to_addr = to_addr;

  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */

      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
      else
	data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
							-1));

      to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
				       data.offset - 1);

      to1 = adjust_address (data.to, QImode, data.offset);
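/* Illustrative call: copy 16 bytes and obtain the address one past the
   end, mempcpy-style (assuming both operands are BLKmode MEMs):

     rtx end = move_by_pieces (to, from, 16, MEM_ALIGN (from), 1);  */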
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
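/* Worked example (illustrative): for l == 11 word-aligned bytes with
   MOVE_MAX == 4, the loop counts 2 SImode moves (8 bytes), then 1 HImode
   move (2 bytes), then 1 QImode move (1 byte), returning 4.  */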
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;
      data->len -= size;
    }
}

/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)

  if (GET_MODE (y) != BLKmode)

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)

  if (GET_CODE (y) != MEM)

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
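/* Typical call (an illustrative sketch):

     emit_block_move (dst_mem, src_mem, GEN_INT (len), BLOCK_OP_NORMAL);

   which tries move_by_pieces, then a movstr pattern, then a memcpy/bcopy
   libcall, and finally an explicit byte-copy loop, in that order.  */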
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);

    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,

	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}

/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}

/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}

/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

init_block_move_fn (const char *asmspec)
{
      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));

emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}

/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
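/* The emitted RTL corresponds to this C shape (illustrative):

     iter = 0;
     goto cmp;
   top:
     x[iter] = y[iter];
     iter = iter + 1;
   cmp:
     if (iter < size) goto top;  */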
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));

	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (int regno, rtx x, int nregs)
{
  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));

	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

gen_group_rtx (rtx orig)
{
  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
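/* For instance (illustrative), a two-register group such as

     (parallel [(expr_list (reg:DI 3) (const_int 0))
		(expr_list (reg:DI 4) (const_int 8))])

   is cloned into the same PARALLEL shape with regs 3 and 4 replaced by
   freshly allocated DImode pseudos, the byte offsets kept intact.  */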
1832 /* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
1833 where DST is non-consecutive registers represented by a PARALLEL.
1834 SSIZE represents the total size of block ORIG_SRC in bytes, or -1
1838 emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
1843 if (GET_CODE (dst) != PARALLEL)
1846 /* Check for a NULL entry, used to indicate that the parameter goes
1847 both on the stack and in registers. */
1848 if (XEXP (XVECEXP (dst, 0, 0), 0))
1853 tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));
1855 /* Process the pieces. */
1856 for (i = start; i < XVECLEN (dst, 0); i++)
1858 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1859 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1860 unsigned int bytelen = GET_MODE_SIZE (mode);
1863 /* Handle trailing fragments that run over the size of the struct. */
1864 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
1866 /* Arrange to shift the fragment to where it belongs.
1867 extract_bit_field loads to the lsb of the reg. */
1869 #ifdef BLOCK_REG_PADDING
1870 BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
1871 == (BYTES_BIG_ENDIAN ? upward : downward)
1876 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1877 bytelen = ssize - bytepos;
1882 /* If we won't be loading directly from memory, protect the real source
1883 from strange tricks we might play; but make sure that the source can
1884 be loaded directly into the destination. */
1886 if (GET_CODE (orig_src) != MEM
1887 && (!CONSTANT_P (orig_src)
1888 || (GET_MODE (orig_src) != mode
1889 && GET_MODE (orig_src) != VOIDmode)))
1891 if (GET_MODE (orig_src) == VOIDmode)
1892 src = gen_reg_rtx (mode);
1894 src = gen_reg_rtx (GET_MODE (orig_src));
1896 emit_move_insn (src, orig_src);
1899 /* Optimize the access just a bit. */
1900 if (GET_CODE (src) == MEM
1901 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
1902 || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
1903 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1904 && bytelen == GET_MODE_SIZE (mode))
1906 tmps[i] = gen_reg_rtx (mode);
1907 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
1909 else if (GET_CODE (src) == CONCAT)
1911 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
1912 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
1914 if ((bytepos == 0 && bytelen == slen0)
1915 || (bytepos != 0 && bytepos + bytelen <= slen))
1917 /* The following assumes that the concatenated objects all
1918 have the same size. In this case, a simple calculation
1919 can be used to determine the object and the bit field
1921 tmps[i] = XEXP (src, bytepos / slen0);
1922 if (! CONSTANT_P (tmps[i])
1923 && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
1924 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
1925 (bytepos % slen0) * BITS_PER_UNIT,
1926 1, NULL_RTX, mode, mode, ssize);
1928 else if (bytepos == 0)
1930 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
1931 emit_move_insn (mem, src);
1932 tmps[i] = adjust_address (mem, mode, 0);
1937 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
1938 SIMD register, which is currently broken. While we get GCC
1939 to emit proper RTL for these cases, let's dump to memory. */
1940 else if (VECTOR_MODE_P (GET_MODE (dst))
1943 int slen = GET_MODE_SIZE (GET_MODE (src));
1946 mem = assign_stack_temp (GET_MODE (src), slen, 0);
1947 emit_move_insn (mem, src);
1948 tmps[i] = adjust_address (mem, mode, (int) bytepos);
1950 else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
1951 && XVECLEN (dst, 0) > 1)
1952 tmps[i] = simplify_gen_subreg (mode, src, GET_MODE(dst), bytepos);
1953 else if (CONSTANT_P (src)
1954 || (REG_P (src) && GET_MODE (src) == mode))
1957 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1958 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1962 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1963 tmps[i], 0, OPTAB_WIDEN);
1968 /* Copy the extracted pieces into the proper (probable) hard regs. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1973 /* Emit code to move a block SRC to block DST, where SRC and DST are
1974 non-consecutive groups of registers, each represented by a PARALLEL. */
1977 emit_group_move (rtx dst, rtx src)
1981 if (GET_CODE (src) != PARALLEL
1982 || GET_CODE (dst) != PARALLEL
1983 || XVECLEN (src, 0) != XVECLEN (dst, 0))
1986 /* Skip first entry if NULL. */
1987 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
1988 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
1989 XEXP (XVECEXP (src, 0, i), 0));
1992 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1993 where SRC is non-consecutive registers represented by a PARALLEL.
1994 SSIZE represents the total size of block ORIG_DST, or -1 if not
1998 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2003 if (GET_CODE (src) != PARALLEL)
2006 /* Check for a NULL entry, used to indicate that the parameter goes
2007 both on the stack and in registers. */
2008 if (XEXP (XVECEXP (src, 0, 0), 0))
2013 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2015 /* Copy the (probable) hard regs into pseudos. */
2016 for (i = start; i < XVECLEN (src, 0); i++)
2018 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2019 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2020 emit_move_insn (tmps[i], reg);
2024 /* If we won't be storing directly into memory, protect the real destination
2025 from strange tricks we might play. */
2027 if (GET_CODE (dst) == PARALLEL)
2031 /* We can get a PARALLEL dst if there is a conditional expression in
2032 a return statement. In that case, the dst and src are the same,
2033 so no action is necessary. */
2034 if (rtx_equal_p (dst, src))
2037 /* It is unclear if we can ever reach here, but we may as well handle
2038 it. Allocate a temporary, and split this into a store/load to/from
2041 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2042 emit_group_store (temp, src, type, ssize);
2043 emit_group_load (dst, temp, type, ssize);
2046 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2048 dst = gen_reg_rtx (GET_MODE (orig_dst));
2049 /* Make life a bit easier for combine. */
2050 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2053 /* Process the pieces. */
2054 for (i = start; i < XVECLEN (src, 0); i++)
2056 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2057 enum machine_mode mode = GET_MODE (tmps[i]);
2058 unsigned int bytelen = GET_MODE_SIZE (mode);
2061 /* Handle trailing fragments that run over the size of the struct. */
2062 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2064 /* store_bit_field always takes its value from the lsb.
2065 Move the fragment to the lsb if it's not already there. */
2067 #ifdef BLOCK_REG_PADDING
2068 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2069 == (BYTES_BIG_ENDIAN ? upward : downward)
2075 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2076 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2077 tmps[i], 0, OPTAB_WIDEN);
2079 bytelen = ssize - bytepos;
2082 if (GET_CODE (dst) == CONCAT)
2084 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2085 dest = XEXP (dst, 0);
2086 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2088 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2089 dest = XEXP (dst, 1);
2091 else if (bytepos == 0 && XVECLEN (src, 0))
2093 dest = assign_stack_temp (GET_MODE (dest),
2094 GET_MODE_SIZE (GET_MODE (dest)), 0);
2095 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2104 /* Optimize the access just a bit. */
2105 if (GET_CODE (dest) == MEM
2106 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2107 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2108 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2109 && bytelen == GET_MODE_SIZE (mode))
2110 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2112 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2113 mode, tmps[i], ssize);
2118 /* Copy from the pseudo into the (probable) hard reg. */
2119 if (orig_dst != dst)
2120 emit_move_insn (orig_dst, dst);
2123 /* Generate code to copy a BLKmode object of TYPE out of a
2124 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2125 is null, a stack temporary is created. TGTBLK is returned.
2127 The purpose of this routine is to handle functions that return
2128 BLKmode structures in registers. Some machines (the PA for example)
2129 want to return all small structures in registers regardless of the
2130 structure's alignment. */
2133 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2135 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2136 rtx src = NULL, dst = NULL;
2137 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2138 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2142 tgtblk = assign_temp (build_qualified_type (type,
2144 | TYPE_QUAL_CONST)),
2146 preserve_temp_slots (tgtblk);
2149 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2150 into a new pseudo which is a full word. */
2152 if (GET_MODE (srcreg) != BLKmode
2153 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2154 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2156 /* If the structure doesn't take up a whole number of words, see whether
2157 SRCREG is padded on the left or on the right. If it's on the left,
2158 set PADDING_CORRECTION to the number of bits to skip.
2160 In most ABIs, the structure will be returned at the least significant end of
2161 the register, which translates to right padding on little-endian
2162 targets and left padding on big-endian targets. The opposite
2163 holds if the structure is returned at the most significant
2164 end of the register. */
2165 if (bytes % UNITS_PER_WORD != 0
2166 && (targetm.calls.return_in_msb (type)
2168 : BYTES_BIG_ENDIAN))
2170 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
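/* For example, on a 32-bit big-endian target where the structure is
   returned at the least significant end of the register, a 5-byte
   structure gives bytes % UNITS_PER_WORD == 1, so PADDING_CORRECTION
   == 32 - 8 == 24: the first 24 bits of the left-padded source
   register are skipped.  */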
2172 /* Copy the structure BITSIZE bits at a time.
2174 We could probably emit more efficient code for machines which do not use
2175 strict alignment, but it doesn't seem worth the effort at the current time.  */
2177 for (bitpos = 0, xbitpos = padding_correction;
2178 bitpos < bytes * BITS_PER_UNIT;
2179 bitpos += bitsize, xbitpos += bitsize)
2181 /* We need a new source operand each time xbitpos is on a
2182 word boundary and when xbitpos == padding_correction
2183 (the first time through). */
2184 if (xbitpos % BITS_PER_WORD == 0
2185 || xbitpos == padding_correction)
2186 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2189 /* We need a new destination operand each time bitpos is on a word boundary.  */
2191 if (bitpos % BITS_PER_WORD == 0)
2192 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2194 /* Use xbitpos for the source extraction (right justified) and
2195 bitpos for the destination store (left justified).  */
2196 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2197 extract_bit_field (src, bitsize,
2198 xbitpos % BITS_PER_WORD, 1,
2199 NULL_RTX, word_mode, word_mode,
2207 /* Add a USE expression for REG to the (possibly empty) list pointed
2208 to by CALL_FUSAGE. REG must denote a hard register. */
2211 use_reg (rtx *call_fusage, rtx reg)
2214 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2218 = gen_rtx_EXPR_LIST (VOIDmode,
2219 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2222 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2223 starting at REGNO. All of these registers must be hard registers. */
2226 use_regs (rtx *call_fusage, int regno, int nregs)
2230 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2233 for (i = 0; i < nregs; i++)
2234 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2237 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2238 PARALLEL REGS. This is for calls that pass values in multiple
2239 non-contiguous locations. The Irix 6 ABI has examples of this. */
2242 use_group_regs (rtx *call_fusage, rtx regs)
2246 for (i = 0; i < XVECLEN (regs, 0); i++)
2248 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2250 /* A NULL entry means the parameter goes both on the stack and in
2251 registers. This can also be a MEM for targets that pass values
2252 partially on the stack and partially in registers. */
2253 if (reg != 0 && REG_P (reg))
2254 use_reg (call_fusage, reg);
2259 /* Determine whether the LEN bytes generated by CONSTFUN can be
2260 stored to memory using several move instructions. CONSTFUNDATA is
2261 a pointer which will be passed as an argument in every CONSTFUN call.
2262 ALIGN is the maximum alignment we can assume.  Return nonzero if a
2263 call to store_by_pieces should succeed. */
2266 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2267 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2268 void *constfundata, unsigned int align)
2270 unsigned HOST_WIDE_INT max_size, l;
2271 HOST_WIDE_INT offset = 0;
2272 enum machine_mode mode, tmode;
2273 enum insn_code icode;
2280 if (! STORE_BY_PIECES_P (len, align))
2283 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2284 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2285 align = MOVE_MAX * BITS_PER_UNIT;
2287 /* We would first store what we can in the largest integer mode, then go to
2288 successively smaller modes. */
2291 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2296 max_size = STORE_MAX_PIECES + 1;
2297 while (max_size > 1)
2299 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2300 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2301 if (GET_MODE_SIZE (tmode) < max_size)
2304 if (mode == VOIDmode)
2307 icode = mov_optab->handlers[(int) mode].insn_code;
2308 if (icode != CODE_FOR_nothing
2309 && align >= GET_MODE_ALIGNMENT (mode))
2311 unsigned int size = GET_MODE_SIZE (mode);
2318 cst = (*constfun) (constfundata, offset, mode);
2319 if (!LEGITIMATE_CONSTANT_P (cst))
2329 max_size = GET_MODE_SIZE (mode);
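/* As a worked example, assume a 32-bit target whose SImode, HImode
   and QImode mov patterns are all usable at this alignment: for
   LEN == 7 the forward pass tries one SImode piece (l: 7 -> 3),
   shrinks max_size, then one HImode piece (3 -> 1) and one QImode
   piece (1 -> 0), calling CONSTFUN and checking
   LEGITIMATE_CONSTANT_P at every offset.  */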
2332 /* The code above should have handled everything. */
2340 /* Generate several move instructions to store LEN bytes generated by
2341 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2342 pointer which will be passed as an argument in every CONSTFUN call.
2343 ALIGN is the maximum alignment we can assume.
2344 If ENDP is 0 return TO; if ENDP is 1 return memory at the end, ala
2345 mempcpy; and if ENDP is 2 return memory at the end minus one byte, ala
stpcpy.  */
2349 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2350 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2351 void *constfundata, unsigned int align, int endp)
2353 struct store_by_pieces data;
2362 if (! STORE_BY_PIECES_P (len, align))
2364 to = protect_from_queue (to, 1);
2365 data.constfun = constfun;
2366 data.constfundata = constfundata;
2369 store_by_pieces_1 (&data, align);
2380 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2381 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2383 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2386 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2393 to1 = adjust_address (data.to, QImode, data.offset);
2401 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2402 rtx with BLKmode). The caller must pass TO through protect_from_queue
2403 before calling.  ALIGN is the maximum alignment we can assume.  */
2406 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2408 struct store_by_pieces data;
2413 data.constfun = clear_by_pieces_1;
2414 data.constfundata = NULL;
2417 store_by_pieces_1 (&data, align);
2420 /* Callback routine for clear_by_pieces.
2421 Return const0_rtx unconditionally. */
2424 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2425 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2426 enum machine_mode mode ATTRIBUTE_UNUSED)
2431 /* Subroutine of clear_by_pieces and store_by_pieces.
2432 Generate several move instructions to store LEN bytes of block TO. (A MEM
2433 rtx with BLKmode). The caller must pass TO through protect_from_queue
2434 before calling.  ALIGN is the maximum alignment we can assume.  */
2437 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2438 unsigned int align ATTRIBUTE_UNUSED)
2440 rtx to_addr = XEXP (data->to, 0);
2441 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2442 enum machine_mode mode = VOIDmode, tmode;
2443 enum insn_code icode;
2446 data->to_addr = to_addr;
2448 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2449 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2451 data->explicit_inc_to = 0;
2453 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2455 data->offset = data->len;
2457 /* If storing requires more than two move insns,
2458 copy addresses to registers (to make displacements shorter)
2459 and use post-increment if available. */
2460 if (!data->autinc_to
2461 && move_by_pieces_ninsns (data->len, align) > 2)
2463 /* Determine the main mode we'll be using. */
2464 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2465 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2466 if (GET_MODE_SIZE (tmode) < max_size)
2469 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2471 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2472 data->autinc_to = 1;
2473 data->explicit_inc_to = -1;
2476 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2477 && ! data->autinc_to)
2479 data->to_addr = copy_addr_to_reg (to_addr);
2480 data->autinc_to = 1;
2481 data->explicit_inc_to = 1;
2484 if ( !data->autinc_to && CONSTANT_P (to_addr))
2485 data->to_addr = copy_addr_to_reg (to_addr);
2488 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2489 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2490 align = MOVE_MAX * BITS_PER_UNIT;
2492 /* First store what we can in the largest integer mode, then go to
2493 successively smaller modes. */
2495 while (max_size > 1)
2497 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2498 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2499 if (GET_MODE_SIZE (tmode) < max_size)
2502 if (mode == VOIDmode)
2505 icode = mov_optab->handlers[(int) mode].insn_code;
2506 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2507 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2509 max_size = GET_MODE_SIZE (mode);
2512 /* The code above should have handled everything. */
2517 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2518 with move instructions for mode MODE. GENFUN is the gen_... function
2519 to make a move insn for that mode. DATA has all the other info. */
2522 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2523 struct store_by_pieces *data)
2525 unsigned int size = GET_MODE_SIZE (mode);
2528 while (data->len >= size)
2531 data->offset -= size;
2533 if (data->autinc_to)
2534 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2537 to1 = adjust_address (data->to, mode, data->offset);
2539 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2540 emit_insn (gen_add2_insn (data->to_addr,
2541 GEN_INT (-(HOST_WIDE_INT) size)));
2543 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2544 emit_insn ((*genfun) (to1, cst));
2546 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2547 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2549 if (! data->reverse)
2550 data->offset += size;
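/* Concretely, storing 6 bytes at 32-bit alignment with no
   auto-increment addressing emits one SImode store at offset 0
   (LEN drops from 6 to 2 and OFFSET advances to 4); the loop then
   exits, and store_by_pieces_1 calls back with HImode to store the
   remaining 2 bytes at offset 4.  */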
2556 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2557 its length in bytes. */
2560 clear_storage (rtx object, rtx size)
2563 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2564 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2566 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2567 just move a zero. Otherwise, do this a piece at a time. */
2568 if (GET_MODE (object) != BLKmode
2569 && GET_CODE (size) == CONST_INT
2570 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2571 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2574 object = protect_from_queue (object, 1);
2575 size = protect_from_queue (size, 0);
2577 if (size == const0_rtx)
2579 else if (GET_CODE (size) == CONST_INT
2580 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2581 clear_by_pieces (object, INTVAL (size), align);
2582 else if (clear_storage_via_clrstr (object, size, align))
2585 retval = clear_storage_via_libcall (object, size);
2591 /* A subroutine of clear_storage. Expand a clrstr pattern;
2592 return true if successful. */
2595 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2597 /* Try the most limited insn first, because there's no point
2598 including more than one in the machine description unless
2599 the more limited one has some advantage. */
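/* For illustration only: a hypothetical port might supply a clrstr
   pattern whose count operand is HImode and limited to short blocks
   alongside a general SImode variant; walking from the narrowest
   mode upward offers the operands to the more restricted (and
   presumably cheaper) pattern first.  */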
2601 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2602 enum machine_mode mode;
2604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2605 mode = GET_MODE_WIDER_MODE (mode))
2607 enum insn_code code = clrstr_optab[(int) mode];
2608 insn_operand_predicate_fn pred;
2610 if (code != CODE_FOR_nothing
2611 /* We don't need MODE to be narrower than
2612 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2613 the mode mask, as it is returned by the macro, it will
2614 definitely be less than the actual mode mask. */
2615 && ((GET_CODE (size) == CONST_INT
2616 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2617 <= (GET_MODE_MASK (mode) >> 1)))
2618 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2619 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2620 || (*pred) (object, BLKmode))
2621 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2622 || (*pred) (opalign, VOIDmode)))
2625 rtx last = get_last_insn ();
2628 op1 = convert_to_mode (mode, size, 1);
2629 pred = insn_data[(int) code].operand[1].predicate;
2630 if (pred != 0 && ! (*pred) (op1, mode))
2631 op1 = copy_to_mode_reg (mode, op1);
2633 pat = GEN_FCN ((int) code) (object, op1, opalign);
2640 delete_insns_since (last);
2647 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2648 Return the return value of memset, 0 otherwise. */
2651 clear_storage_via_libcall (rtx object, rtx size)
2653 tree call_expr, arg_list, fn, object_tree, size_tree;
2654 enum machine_mode size_mode;
2657 /* OBJECT or SIZE may have been passed through protect_from_queue.
2659 It is unsafe to save the value generated by protect_from_queue
2660 and reuse it later. Consider what happens if emit_queue is
2661 called before the return value from protect_from_queue is used.
2663 Expansion of the CALL_EXPR below will call emit_queue before
2664 we are finished emitting RTL for argument setup. So if we are
2665 not careful we could get the wrong value for an argument.
2667 To avoid this problem we go ahead and emit code to copy OBJECT
2668 and SIZE into new pseudos. We can then place those new pseudos
2669 into an RTL_EXPR and use them later, even after a call to emit_queue.
2672 Note this is not strictly needed for library calls since they
2673 do not call emit_queue before loading their arguments. However,
2674 we may need to have library calls call emit_queue in the future
2675 since failing to do so could cause problems for targets which
2676 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2678 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2680 if (TARGET_MEM_FUNCTIONS)
2681 size_mode = TYPE_MODE (sizetype);
2683 size_mode = TYPE_MODE (unsigned_type_node);
2684 size = convert_to_mode (size_mode, size, 1);
2685 size = copy_to_mode_reg (size_mode, size);
2687 /* It is incorrect to use the libcall calling conventions to call
2688 memset in this context. This could be a user call to memset and
2689 the user may wish to examine the return value from memset. For
2690 targets where libcalls and normal calls have different conventions
2691 for returning pointers, we could end up generating incorrect code.
2693 For convenience, we generate the call to bzero this way as well. */
2695 object_tree = make_tree (ptr_type_node, object);
2696 if (TARGET_MEM_FUNCTIONS)
2697 size_tree = make_tree (sizetype, size);
2699 size_tree = make_tree (unsigned_type_node, size);
2701 fn = clear_storage_libcall_fn (true);
2702 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2703 if (TARGET_MEM_FUNCTIONS)
2704 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2705 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2707 /* Now we have to build up the CALL_EXPR itself. */
2708 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2709 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2710 call_expr, arg_list, NULL_TREE);
2712 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2714 /* If we are initializing a readonly value, show the above call
2715 clobbered it. Otherwise, a load from it may erroneously be
2716 hoisted from a loop. */
2717 if (RTX_UNCHANGING_P (object))
2718 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2720 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2723 /* A subroutine of clear_storage_via_libcall. Create the tree node
2724 for the function we use for block clears. The first time FOR_CALL
2725 is true, we call assemble_external. */
2727 static GTY(()) tree block_clear_fn;
2730 init_block_clear_fn (const char *asmspec)
2732 if (!block_clear_fn)
2736 if (TARGET_MEM_FUNCTIONS)
2738 fn = get_identifier ("memset");
2739 args = build_function_type_list (ptr_type_node, ptr_type_node,
2740 integer_type_node, sizetype,
2745 fn = get_identifier ("bzero");
2746 args = build_function_type_list (void_type_node, ptr_type_node,
2747 unsigned_type_node, NULL_TREE);
2750 fn = build_decl (FUNCTION_DECL, fn, args);
2751 DECL_EXTERNAL (fn) = 1;
2752 TREE_PUBLIC (fn) = 1;
2753 DECL_ARTIFICIAL (fn) = 1;
2754 TREE_NOTHROW (fn) = 1;
2756 block_clear_fn = fn;
2761 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2762 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2767 clear_storage_libcall_fn (int for_call)
2769 static bool emitted_extern;
2771 if (!block_clear_fn)
2772 init_block_clear_fn (NULL);
2774 if (for_call && !emitted_extern)
2776 emitted_extern = true;
2777 make_decl_rtl (block_clear_fn, NULL);
2778 assemble_external (block_clear_fn);
2781 return block_clear_fn;
2784 /* Generate code to copy Y into X.
2785 Both Y and X must have the same mode, except that
2786 Y can be a constant with VOIDmode.
2787 This mode cannot be BLKmode; use emit_block_move for that.
2789 Return the last instruction emitted. */
2792 emit_move_insn (rtx x, rtx y)
2794 enum machine_mode mode = GET_MODE (x);
2795 rtx y_cst = NULL_RTX;
2798 x = protect_from_queue (x, 1);
2799 y = protect_from_queue (y, 0);
2801 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2807 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2808 && (last_insn = compress_float_constant (x, y)))
2813 if (!LEGITIMATE_CONSTANT_P (y))
2815 y = force_const_mem (mode, y);
2817 /* If the target's cannot_force_const_mem prevented the spill,
2818 assume that the target's move expanders will also take care
2819 of the non-legitimate constant. */
2825 /* If X or Y are memory references, verify that their addresses are valid for the machine.  */
2827 if (GET_CODE (x) == MEM
2828 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2829 && ! push_operand (x, GET_MODE (x)))
2831 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2832 x = validize_mem (x);
2834 if (GET_CODE (y) == MEM
2835 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2837 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2838 y = validize_mem (y);
2840 if (mode == BLKmode)
2843 last_insn = emit_move_insn_1 (x, y);
2845 if (y_cst && REG_P (x)
2846 && (set = single_set (last_insn)) != NULL_RTX
2847 && SET_DEST (set) == x
2848 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2849 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2854 /* Low level part of emit_move_insn.
2855 Called just like emit_move_insn, but assumes X and Y
2856 are basically valid. */
2859 emit_move_insn_1 (rtx x, rtx y)
2861 enum machine_mode mode = GET_MODE (x);
2862 enum machine_mode submode;
2863 enum mode_class class = GET_MODE_CLASS (mode);
2865 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2868 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2870 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2872 /* Expand complex moves by moving real part and imag part, if possible. */
2873 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2874 && BLKmode != (submode = GET_MODE_INNER (mode))
2875 && (mov_optab->handlers[(int) submode].insn_code
2876 != CODE_FOR_nothing))
2878 /* Don't split destination if it is a stack push. */
2879 int stack = push_operand (x, GET_MODE (x));
2881 #ifdef PUSH_ROUNDING
2882 /* In case we output to the stack, but the submode size is not something
2883 the machine can push exactly, we need to use move instructions.  */
2885 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2886 != GET_MODE_SIZE (submode)))
2889 HOST_WIDE_INT offset1, offset2;
2891 /* Do not use anti_adjust_stack, since we don't want to update
2892 stack_pointer_delta. */
2893 temp = expand_binop (Pmode,
2894 #ifdef STACK_GROWS_DOWNWARD
2902 (GET_MODE_SIZE (GET_MODE (x)))),
2903 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2905 if (temp != stack_pointer_rtx)
2906 emit_move_insn (stack_pointer_rtx, temp);
2908 #ifdef STACK_GROWS_DOWNWARD
2910 offset2 = GET_MODE_SIZE (submode);
2912 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2913 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2914 + GET_MODE_SIZE (submode));
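/* For instance, assume PUSH_ROUNDING rounds every push up to 8 bytes
   and X is an SCmode value (two SFmode halves, 8 bytes total):
   pushing each 4-byte half separately would occupy 16 bytes, so the
   stack pointer is adjusted once by 8 above, and on a
   downward-growing stack the real part is stored at offset1 == 0
   and the imaginary part at offset2 == 4.  */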
2917 emit_move_insn (change_address (x, submode,
2918 gen_rtx_PLUS (Pmode,
2920 GEN_INT (offset1))),
2921 gen_realpart (submode, y));
2922 emit_move_insn (change_address (x, submode,
2923 gen_rtx_PLUS (Pmode,
2925 GEN_INT (offset2))),
2926 gen_imagpart (submode, y));
2930 /* If this is a stack push, push the highpart first, so it
2931 will be in the argument order.
2933 In that case, change_address is used only to convert
2934 the mode, not to change the address. */
2937 /* Note that the real part always precedes the imag part in memory
2938 regardless of the machine's endianness.  */
2939 #ifdef STACK_GROWS_DOWNWARD
2940 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2941 gen_imagpart (submode, y));
2942 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2943 gen_realpart (submode, y));
2945 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2946 gen_realpart (submode, y));
2947 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2948 gen_imagpart (submode, y));
2953 rtx realpart_x, realpart_y;
2954 rtx imagpart_x, imagpart_y;
2956 /* If this is a complex value with each part being smaller than a
2957 word, the usual calling sequence will likely pack the pieces into
2958 a single register. Unfortunately, SUBREG of hard registers only
2959 deals in terms of words, so we have a problem converting input
2960 arguments to the CONCAT of two registers that is used elsewhere
2961 for complex values. If this is before reload, we can copy it into
2962 memory and reload. FIXME, we should see about using extract and
2963 insert on integer registers, but complex short and complex char
2964 variables should be rarely used. */
2965 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2966 && (reload_in_progress | reload_completed) == 0)
2969 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2971 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2973 if (packed_dest_p || packed_src_p)
2975 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2976 ? MODE_FLOAT : MODE_INT);
2978 enum machine_mode reg_mode
2979 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
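/* E.g. a complex char value (CQImode, 16 bits) that the ABI has
   packed into a single hard register: reg_mode becomes HImode, and
   the value is bounced through a stack temporary viewed as HImode
   on the register side and as the complex mode on the other.  */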
2981 if (reg_mode != BLKmode)
2983 rtx mem = assign_stack_temp (reg_mode,
2984 GET_MODE_SIZE (mode), 0);
2985 rtx cmem = adjust_address (mem, mode, 0);
2989 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2991 emit_move_insn_1 (cmem, y);
2992 return emit_move_insn_1 (sreg, mem);
2996 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2998 emit_move_insn_1 (mem, sreg);
2999 return emit_move_insn_1 (x, cmem);
3005 realpart_x = gen_realpart (submode, x);
3006 realpart_y = gen_realpart (submode, y);
3007 imagpart_x = gen_imagpart (submode, x);
3008 imagpart_y = gen_imagpart (submode, y);
3010 /* Show the output dies here. This is necessary for SUBREGs
3011 of pseudos since we cannot track their lifetimes correctly;
3012 hard regs shouldn't appear here except as return values.
3013 We never want to emit such a clobber after reload. */
3015 && ! (reload_in_progress || reload_completed)
3016 && (GET_CODE (realpart_x) == SUBREG
3017 || GET_CODE (imagpart_x) == SUBREG))
3018 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3020 emit_move_insn (realpart_x, realpart_y);
3021 emit_move_insn (imagpart_x, imagpart_y);
3024 return get_last_insn ();
3027 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3028 find a mode to do it in. If we have a movcc, use it. Otherwise,
3029 find the MODE_INT mode of the same width. */
3030 else if (GET_MODE_CLASS (mode) == MODE_CC
3031 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3033 enum insn_code insn_code;
3034 enum machine_mode tmode = VOIDmode;
3038 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3041 for (tmode = QImode; tmode != VOIDmode;
3042 tmode = GET_MODE_WIDER_MODE (tmode))
3043 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3046 if (tmode == VOIDmode)
3049 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3050 may call change_address which is not appropriate if we were
3051 called when a reload was in progress. We don't have to worry
3052 about changing the address since the size in bytes is supposed to
3053 be the same. Copy the MEM to change the mode and move any
3054 substitutions from the old MEM to the new one. */
3056 if (reload_in_progress)
3058 x = gen_lowpart_common (tmode, x1);
3059 if (x == 0 && GET_CODE (x1) == MEM)
3061 x = adjust_address_nv (x1, tmode, 0);
3062 copy_replacements (x1, x);
3065 y = gen_lowpart_common (tmode, y1);
3066 if (y == 0 && GET_CODE (y1) == MEM)
3068 y = adjust_address_nv (y1, tmode, 0);
3069 copy_replacements (y1, y);
3074 x = gen_lowpart (tmode, x);
3075 y = gen_lowpart (tmode, y);
3078 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3079 return emit_insn (GEN_FCN (insn_code) (x, y));
3082 /* Try using a move pattern for the corresponding integer mode. This is
3083 only safe when simplify_subreg can convert MODE constants into integer
3084 constants. At present, it can only do this reliably if the value
3085 fits within a HOST_WIDE_INT. */
3086 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3087 && (submode = int_mode_for_mode (mode)) != BLKmode
3088 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3089 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3090 (simplify_gen_subreg (submode, x, mode, 0),
3091 simplify_gen_subreg (submode, y, mode, 0)));
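/* For example, an SFmode move on a target without a movsf pattern
   can be emitted as a movsi of the same 32 bits, since
   int_mode_for_mode (SFmode) is SImode and simplify_gen_subreg can
   rewrite both operands, including an SFmode CONST_DOUBLE whose
   bits fit in a HOST_WIDE_INT.  */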
3093 /* This will handle any multi-word or full-word mode that lacks a move_insn
3094 pattern. However, you will get better code if you define such patterns,
3095 even if they must turn into multiple assembler instructions. */
3096 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3103 #ifdef PUSH_ROUNDING
3105 /* If X is a push on the stack, do the push now and replace
3106 X with a reference to the stack pointer. */
3107 if (push_operand (x, GET_MODE (x)))
3112 /* Do not use anti_adjust_stack, since we don't want to update
3113 stack_pointer_delta. */
3114 temp = expand_binop (Pmode,
3115 #ifdef STACK_GROWS_DOWNWARD
3123 (GET_MODE_SIZE (GET_MODE (x)))),
3124 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3126 if (temp != stack_pointer_rtx)
3127 emit_move_insn (stack_pointer_rtx, temp);
3129 code = GET_CODE (XEXP (x, 0));
3131 /* Just hope that small offsets off SP are OK. */
3132 if (code == POST_INC)
3133 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3134 GEN_INT (-((HOST_WIDE_INT)
3135 GET_MODE_SIZE (GET_MODE (x)))));
3136 else if (code == POST_DEC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3140 temp = stack_pointer_rtx;
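/* The stack pointer was already adjusted by the full push size
   above, so recover the slot the post-modify push would have
   written: sp - size for POST_INC (the data lies below the updated
   pointer), sp + size for POST_DEC.  */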
3142 x = change_address (x, VOIDmode, temp);
3146 /* If we are in reload, see if either operand is a MEM whose address
3147 is scheduled for replacement. */
3148 if (reload_in_progress && GET_CODE (x) == MEM
3149 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3150 x = replace_equiv_address_nv (x, inner);
3151 if (reload_in_progress && GET_CODE (y) == MEM
3152 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3153 y = replace_equiv_address_nv (y, inner);
3159 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3162 rtx xpart = operand_subword (x, i, 1, mode);
3163 rtx ypart = operand_subword (y, i, 1, mode);
3165 /* If we can't get a part of Y, put Y into memory if it is a
3166 constant. Otherwise, force it into a register. If we still
3167 can't get a part of Y, abort. */
3168 if (ypart == 0 && CONSTANT_P (y))
3170 y = force_const_mem (mode, y);
3171 ypart = operand_subword (y, i, 1, mode);
3173 else if (ypart == 0)
3174 ypart = operand_subword_force (y, i, mode);
3176 if (xpart == 0 || ypart == 0)
3179 need_clobber |= (GET_CODE (xpart) == SUBREG);
3181 last_insn = emit_move_insn (xpart, ypart);
3187 /* Show the output dies here. This is necessary for SUBREGs
3188 of pseudos since we cannot track their lifetimes correctly;
3189 hard regs shouldn't appear here except as return values.
3190 We never want to emit such a clobber after reload. */
3192 && ! (reload_in_progress || reload_completed)
3193 && need_clobber != 0)
3194 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3204 /* If Y is representable exactly in a narrower mode, and the target can
3205 perform the extension directly from constant or memory, then emit the
3206 move as an extension. */
3209 compress_float_constant (rtx x, rtx y)
3211 enum machine_mode dstmode = GET_MODE (x);
3212 enum machine_mode orig_srcmode = GET_MODE (y);
3213 enum machine_mode srcmode;
3216 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3218 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3219 srcmode != orig_srcmode;
3220 srcmode = GET_MODE_WIDER_MODE (srcmode))
3223 rtx trunc_y, last_insn;
3225 /* Skip if the target can't extend this way. */
3226 ic = can_extend_p (dstmode, srcmode, 0);
3227 if (ic == CODE_FOR_nothing)
3230 /* Skip if the narrowed value isn't exact. */
3231 if (! exact_real_truncate (srcmode, &r))
3234 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3236 if (LEGITIMATE_CONSTANT_P (trunc_y))
3238 /* Skip if the target needs extra instructions to perform the extension.  */
3240 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3243 else if (float_extend_from_mem[dstmode][srcmode])
3244 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3248 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3249 last_insn = get_last_insn ();
3252 set_unique_reg_note (last_insn, REG_EQUAL, y);
3260 /* Pushing data onto the stack. */
3262 /* Push a block of length SIZE (perhaps variable)
3263 and return an rtx to address the beginning of the block.
3264 Note that it is not possible for the value returned to be a QUEUED.
3265 The value may be virtual_outgoing_args_rtx.
3267 EXTRA is the number of bytes of padding to push in addition to SIZE.
3268 BELOW nonzero means this padding comes at low addresses;
3269 otherwise, the padding comes at high addresses. */
3272 push_block (rtx size, int extra, int below)
3276 size = convert_modes (Pmode, ptr_mode, size, 1);
3277 if (CONSTANT_P (size))
3278 anti_adjust_stack (plus_constant (size, extra));
3279 else if (REG_P (size) && extra == 0)
3280 anti_adjust_stack (size);
3283 temp = copy_to_mode_reg (Pmode, size);
3285 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3286 temp, 0, OPTAB_LIB_WIDEN);
3287 anti_adjust_stack (temp);
3290 #ifndef STACK_GROWS_DOWNWARD
3296 temp = virtual_outgoing_args_rtx;
3297 if (extra != 0 && below)
3298 temp = plus_constant (temp, extra);
3302 if (GET_CODE (size) == CONST_INT)
3303 temp = plus_constant (virtual_outgoing_args_rtx,
3304 -INTVAL (size) - (below ? 0 : extra));
3305 else if (extra != 0 && !below)
3306 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3307 negate_rtx (Pmode, plus_constant (size, extra)));
3309 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3310 negate_rtx (Pmode, size));
3313 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3316 #ifdef PUSH_ROUNDING
3318 /* Emit a single push insn.  */
3321 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3324 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3326 enum insn_code icode;
3327 insn_operand_predicate_fn pred;
3329 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3330 /* If there is a push pattern, use it.  Otherwise try the old way of
3331 throwing a MEM representing the push operation to the move expander.  */
3332 icode = push_optab->handlers[(int) mode].insn_code;
3333 if (icode != CODE_FOR_nothing)
3335 if (((pred = insn_data[(int) icode].operand[0].predicate)
3336 && !((*pred) (x, mode))))
3337 x = force_reg (mode, x);
3338 emit_insn (GEN_FCN (icode) (x));
3341 if (GET_MODE_SIZE (mode) == rounded_size)
3342 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3343 /* If we are to pad downward, adjust the stack pointer first and
3344 then store X into the stack location using an offset. This is
3345 because emit_move_insn does not know how to pad; it does not have access to the type.  */
3347 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3349 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3350 HOST_WIDE_INT offset;
3352 emit_move_insn (stack_pointer_rtx,
3353 expand_binop (Pmode,
3354 #ifdef STACK_GROWS_DOWNWARD
3360 GEN_INT (rounded_size),
3361 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3363 offset = (HOST_WIDE_INT) padding_size;
3364 #ifdef STACK_GROWS_DOWNWARD
3365 if (STACK_PUSH_CODE == POST_DEC)
3366 /* We have already decremented the stack pointer, so get the previous value.  */
3368 offset += (HOST_WIDE_INT) rounded_size;
3370 if (STACK_PUSH_CODE == POST_INC)
3371 /* We have already incremented the stack pointer, so get the previous value.  */
3373 offset -= (HOST_WIDE_INT) rounded_size;
3375 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
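/* For instance, pushing a 2-byte HImode value when PUSH_ROUNDING
   rounds pushes up to 4 bytes on a downward-growing PRE_DEC stack:
   padding_size == 2, the stack pointer drops by 4, and the value is
   stored at sp + 2, leaving the 2 padding bytes at the low
   addresses of the slot as downward padding requires.  */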
3379 #ifdef STACK_GROWS_DOWNWARD
3380 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3381 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3382 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (rounded_size));
3388 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3391 dest = gen_rtx_MEM (mode, dest_addr);
3395 set_mem_attributes (dest, type, 1);
3397 if (flag_optimize_sibling_calls)
3398 /* Function incoming arguments may overlap with sibling call
3399 outgoing arguments and we cannot allow reordering of reads
3400 from function arguments with stores to outgoing arguments
3401 of sibling calls. */
3402 set_mem_alias_set (dest, 0);
3404 emit_move_insn (dest, x);
3408 /* Generate code to push X onto the stack, assuming it has mode MODE and
3410 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3412 SIZE is an rtx for the size of data to be copied (in bytes),
3413 needed only if X is BLKmode.
3415 ALIGN (in bits) is the maximum alignment we can assume.
3417 If PARTIAL and REG are both nonzero, then copy that many of the first
3418 words of X into registers starting with REG, and push the rest of X.
3419 The amount of space pushed is decreased by PARTIAL words,
3420 rounded *down* to a multiple of PARM_BOUNDARY.
3421 REG must be a hard register in this case.
3422 If REG is zero but PARTIAL is not, take all other actions for an
3423 argument partially in registers, but do not actually load any registers.
3426 EXTRA is the amount in bytes of extra space to leave next to this arg.
3427 This is ignored if an argument block has already been allocated.
3429 On a machine that lacks real push insns, ARGS_ADDR is the address of
3430 the bottom of the argument block for this call. We use indexing off there
3431 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
3432 argument block has not been preallocated.
3434 ARGS_SO_FAR is the size of args previously pushed for this call.
3436 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3437 for arguments passed in registers. If nonzero, it will be the number
3438 of bytes required. */
3441 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3442 unsigned int align, int partial, rtx reg, int extra,
3443 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3447 enum direction stack_direction
3448 #ifdef STACK_GROWS_DOWNWARD
3454 /* Decide where to pad the argument: `downward' for below,
3455 `upward' for above, or `none' for don't pad it.
3456 Default is below for small data on big-endian machines; else above. */
3457 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3459 /* Invert direction if stack is post-decrement.
3461 if (STACK_PUSH_CODE == POST_DEC)
3462 if (where_pad != none)
3463 where_pad = (where_pad == downward ? upward : downward);
3465 xinner = x = protect_from_queue (x, 0);
3467 if (mode == BLKmode)
3469 /* Copy a block into the stack, entirely or partially. */
3472 int used = partial * UNITS_PER_WORD;
3476 if (reg && GET_CODE (reg) == PARALLEL)
3478 /* Use the size of the elt to compute offset. */
3479 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3480 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3481 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3484 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
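/* E.g. partial == 3 words on a 32-bit target with PARM_BOUNDARY == 64:
   used == 12 bytes but offset == 12 % 8 == 4, so only the 8 bytes
   rounded down to a parameter boundary are skipped on the stack and
   the odd 4 register-passed bytes are stored to the stack as well.  */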
3491 /* USED is now the # of bytes we need not copy to the stack
3492 because registers will take care of them. */
3495 xinner = adjust_address (xinner, BLKmode, used);
3497 /* If the partial register-part of the arg counts in its stack size,
3498 skip the part of stack space corresponding to the registers.
3499 Otherwise, start copying to the beginning of the stack space,
3500 by setting SKIP to 0. */
3501 skip = (reg_parm_stack_space == 0) ? 0 : used;
3503 #ifdef PUSH_ROUNDING
3504 /* Do it with several push insns if that doesn't take lots of insns
3505 and if there is no difficulty with push insns that skip bytes
3506 on the stack for alignment purposes. */
3509 && GET_CODE (size) == CONST_INT
3511 && MEM_ALIGN (xinner) >= align
3512 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3513 /* Here we avoid the case of a structure whose weak alignment
3514 forces many pushes of a small amount of data, where the rounding
3515 done by such small pushes causes trouble.  */
3516 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3517 || align >= BIGGEST_ALIGNMENT
3518 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3519 == (align / BITS_PER_UNIT)))
3520 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3522 /* Push padding now if padding above and stack grows down,
3523 or if padding below and stack grows up.
3524 But if space already allocated, this has already been done. */
3525 if (extra && args_addr == 0
3526 && where_pad != none && where_pad != stack_direction)
3527 anti_adjust_stack (GEN_INT (extra));
3529 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3532 #endif /* PUSH_ROUNDING */
3536 /* Otherwise make space on the stack and copy the data
3537 to the address of that space. */
3539 /* Deduct words put into registers from the size we must copy. */
3542 if (GET_CODE (size) == CONST_INT)
3543 size = GEN_INT (INTVAL (size) - used);
3545 size = expand_binop (GET_MODE (size), sub_optab, size,
3546 GEN_INT (used), NULL_RTX, 0,
3550 /* Get the address of the stack space.
3551 In this case, we do not deal with EXTRA separately.
3552 A single stack adjust will do. */
3555 temp = push_block (size, extra, where_pad == downward);
3558 else if (GET_CODE (args_so_far) == CONST_INT)
3559 temp = memory_address (BLKmode,
3560 plus_constant (args_addr,
3561 skip + INTVAL (args_so_far)));
3563 temp = memory_address (BLKmode,
3564 plus_constant (gen_rtx_PLUS (Pmode,
3569 if (!ACCUMULATE_OUTGOING_ARGS)
3571 /* If the source is referenced relative to the stack pointer,
3572 copy it to another register to stabilize it. We do not need
3573 to do this if we know that we won't be changing sp. */
3575 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3576 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3577 temp = copy_to_reg (temp);
3580 target = gen_rtx_MEM (BLKmode, temp);
3584 set_mem_attributes (target, type, 1);
3585 /* Function incoming arguments may overlap with sibling call
3586 outgoing arguments and we cannot allow reordering of reads
3587 from function arguments with stores to outgoing arguments
3588 of sibling calls. */
3589 set_mem_alias_set (target, 0);
3592 /* ALIGN may well be better aligned than TYPE, e.g. due to
3593 PARM_BOUNDARY. Assume the caller isn't lying. */
3594 set_mem_align (target, align);
3596 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3599 else if (partial > 0)
3601 /* Scalar partly in registers. */
3603 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3606 /* # words of start of argument
3607 that we must make space for but need not store. */
3608 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3609 int args_offset = INTVAL (args_so_far);
3612 /* Push padding now if padding above and stack grows down,
3613 or if padding below and stack grows up.
3614 But if space already allocated, this has already been done. */
3615 if (extra && args_addr == 0
3616 && where_pad != none && where_pad != stack_direction)
3617 anti_adjust_stack (GEN_INT (extra));
3619 /* If we make space by pushing it, we might as well push
3620 the real data. Otherwise, we can leave OFFSET nonzero
3621 and leave the space uninitialized. */
3625 /* Now NOT_STACK gets the number of words that we don't need to
3626 allocate on the stack. */
3627 not_stack = partial - offset;
3629 /* If the partial register-part of the arg counts in its stack size,
3630 skip the part of stack space corresponding to the registers.
3631 Otherwise, start copying to the beginning of the stack space,
3632 by setting SKIP to 0. */
3633 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3635 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3636 x = validize_mem (force_const_mem (mode, x));
3638 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3639 SUBREGs of such registers are not allowed. */
3640 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3641 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3642 x = copy_to_reg (x);
3644 /* Loop over all the words allocated on the stack for this arg. */
3645 /* We can do it by words, because any scalar bigger than a word
3646 has a size that is a multiple of a word.  */
3647 #ifndef PUSH_ARGS_REVERSED
3648 for (i = not_stack; i < size; i++)
3650 for (i = size - 1; i >= not_stack; i--)
3652 if (i >= not_stack + offset)
3653 emit_push_insn (operand_subword_force (x, i, mode),
3654 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3656 GEN_INT (args_offset + ((i - not_stack + skip)
3658 reg_parm_stack_space, alignment_pad);
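/* A small example, assuming PARM_BOUNDARY == BITS_PER_WORD: a
   two-word DImode scalar with partial == 1 gives offset == 0 and
   not_stack == 1, so only word 1 is pushed here; word 0 is copied
   into REG by the partial-in-registers code at the end of this
   function.  */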
3665 /* Push padding now if padding above and stack grows down,
3666 or if padding below and stack grows up.
3667 But if space already allocated, this has already been done. */
3668 if (extra && args_addr == 0
3669 && where_pad != none && where_pad != stack_direction)
3670 anti_adjust_stack (GEN_INT (extra));
3672 #ifdef PUSH_ROUNDING
3673 if (args_addr == 0 && PUSH_ARGS)
3674 emit_single_push_insn (mode, x, type);
3678 if (GET_CODE (args_so_far) == CONST_INT)
3680 = memory_address (mode,
3681 plus_constant (args_addr,
3682 INTVAL (args_so_far)));
3684 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3686 dest = gen_rtx_MEM (mode, addr);
3689 set_mem_attributes (dest, type, 1);
3690 /* Function incoming arguments may overlap with sibling call
3691 outgoing arguments and we cannot allow reordering of reads
3692 from function arguments with stores to outgoing arguments
3693 of sibling calls. */
3694 set_mem_alias_set (dest, 0);
3697 emit_move_insn (dest, x);
3701 /* If part should go in registers, copy that part
3702 into the appropriate registers. Do this now, at the end,
3703 since mem-to-mem copies above may do function calls. */
3704 if (partial > 0 && reg != 0)
3706 /* Handle calls that pass values in multiple non-contiguous locations.
3707 The Irix 6 ABI has examples of this. */
3708 if (GET_CODE (reg) == PARALLEL)
3709 emit_group_load (reg, x, type, -1);
3711 move_block_to_reg (REGNO (reg), x, partial, mode);
3714 if (extra && args_addr == 0 && where_pad == stack_direction)
3715 anti_adjust_stack (GEN_INT (extra));
3717 if (alignment_pad && args_addr == 0)
3718 anti_adjust_stack (alignment_pad);
3721 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
3725 get_subtarget (rtx x)
3728 /* Only registers can be subtargets. */
3730 /* If the register is readonly, it can't be set more than once. */
3731 || RTX_UNCHANGING_P (x)
3732 /* Don't use hard regs to avoid extending their life. */
3733 || REGNO (x) < FIRST_PSEUDO_REGISTER
3734 /* Avoid subtargets inside loops,
3735 since they hide some invariant expressions. */
3736 || preserve_subexpressions_p ())
3740 /* Expand an assignment that stores the value of FROM into TO.
3741 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3742 (This may contain a QUEUED rtx;
3743 if the value is constant, this rtx is a constant.)
3744 Otherwise, the returned value is NULL_RTX. */
3747 expand_assignment (tree to, tree from, int want_value)
3752 /* Don't crash if the lhs of the assignment was erroneous. */
3754 if (TREE_CODE (to) == ERROR_MARK)
3756 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3757 return want_value ? result : NULL_RTX;
3760 /* Assignment of a structure component needs special treatment
3761 if the structure component's rtx is not simply a MEM.
3762 Assignment of an array element at a constant index, and assignment of
3763 an array element in an unaligned packed structure field, have the same problem.  */
3766 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3767 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3768 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3770 enum machine_mode mode1;
3771 HOST_WIDE_INT bitsize, bitpos;
3779 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3780 &unsignedp, &volatilep);
3782 /* If we are going to use store_bit_field and extract_bit_field,
3783 make sure to_rtx will be safe for multiple use. */
3785 if (mode1 == VOIDmode && want_value)
3786 tem = stabilize_reference (tem);
3788 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3792 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3794 if (GET_CODE (to_rtx) != MEM)
3797 #ifdef POINTERS_EXTEND_UNSIGNED
3798 if (GET_MODE (offset_rtx) != Pmode)
3799 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3801 if (GET_MODE (offset_rtx) != ptr_mode)
3802 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3805 /* A constant address in TO_RTX can have VOIDmode; we must not try
3806 to call force_reg in that case.  */
3807 if (GET_CODE (to_rtx) == MEM
3808 && GET_MODE (to_rtx) == BLKmode
3809 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3811 && (bitpos % bitsize) == 0
3812 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3813 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3815 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3819 to_rtx = offset_address (to_rtx, offset_rtx,
3820 highest_pow2_factor_for_target (to,
3824 if (GET_CODE (to_rtx) == MEM)
3826 /* If the field is at offset zero, we could have been given the
3827 DECL_RTX of the parent struct. Don't munge it. */
3828 to_rtx = shallow_copy_rtx (to_rtx);
3830 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3833 /* Deal with volatile and readonly fields. The former is only done
3834 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3835 if (volatilep && GET_CODE (to_rtx) == MEM)
3837 if (to_rtx == orig_to_rtx)
3838 to_rtx = copy_rtx (to_rtx);
3839 MEM_VOLATILE_P (to_rtx) = 1;
3842 if (TREE_CODE (to) == COMPONENT_REF
3843 && TREE_READONLY (TREE_OPERAND (to, 1))
3844 /* We can't assert that a MEM won't be set more than once
3845 if the component is not addressable because another
3846 non-addressable component may be referenced by the same MEM. */
3847 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3849 if (to_rtx == orig_to_rtx)
3850 to_rtx = copy_rtx (to_rtx);
3851 RTX_UNCHANGING_P (to_rtx) = 1;
3854 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3856 if (to_rtx == orig_to_rtx)
3857 to_rtx = copy_rtx (to_rtx);
3858 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3861 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3863 /* Spurious cast for HPUX compiler. */
3864 ? ((enum machine_mode)
3865 TYPE_MODE (TREE_TYPE (to)))
3867 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3869 preserve_temp_slots (result);
3873 /* If the value is meaningful, convert RESULT to the proper mode.
3874 Otherwise, return nothing. */
3875 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3876 TYPE_MODE (TREE_TYPE (from)),
3878 TYPE_UNSIGNED (TREE_TYPE (to)))
3882 /* If the rhs is a function call and its value is not an aggregate,
3883 call the function before we start to compute the lhs.
3884 This is needed for correct code for cases such as
3885 val = setjmp (buf) on machines where reference to val
3886 requires loading up part of an address in a separate insn.
3888 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3889 since it might be a promoted variable where the zero- or sign- extension
3890 needs to be done. Handling this in the normal way is safe because no
3891 computation is done before the call. */
3892 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3893 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3894 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3895 && REG_P (DECL_RTL (to))))
3900 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3902 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3904 /* Handle calls that return values in multiple non-contiguous locations.
3905 The Irix 6 ABI has examples of this. */
3906 if (GET_CODE (to_rtx) == PARALLEL)
3907 emit_group_load (to_rtx, value, TREE_TYPE (from),
3908 int_size_in_bytes (TREE_TYPE (from)));
3909 else if (GET_MODE (to_rtx) == BLKmode)
3910 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3913 if (POINTER_TYPE_P (TREE_TYPE (to)))
3914 value = convert_memory_address (GET_MODE (to_rtx), value);
3915 emit_move_insn (to_rtx, value);
3917 preserve_temp_slots (to_rtx);
3920 return want_value ? to_rtx : NULL_RTX;
3923 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3924 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3927 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3929 /* Don't move directly into a return register. */
3930 if (TREE_CODE (to) == RESULT_DECL
3931 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3936 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3938 if (GET_CODE (to_rtx) == PARALLEL)
3939 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3940 int_size_in_bytes (TREE_TYPE (from)));
3942 emit_move_insn (to_rtx, temp);
3944 preserve_temp_slots (to_rtx);
3947 return want_value ? to_rtx : NULL_RTX;
3950 /* In case we are returning the contents of an object which overlaps
3951 the place the value is being stored, use a safe function when copying
3952 a value through a pointer into a structure value return block. */
3953 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3954 && current_function_returns_struct
3955 && !current_function_returns_pcc_struct)
3960 size = expr_size (from);
3961 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3963 if (TARGET_MEM_FUNCTIONS)
3964 emit_library_call (memmove_libfunc, LCT_NORMAL,
3965 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3966 XEXP (from_rtx, 0), Pmode,
3967 convert_to_mode (TYPE_MODE (sizetype),
3968 size, TYPE_UNSIGNED (sizetype)),
3969 TYPE_MODE (sizetype));
3971 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3972 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3973 XEXP (to_rtx, 0), Pmode,
3974 convert_to_mode (TYPE_MODE (integer_type_node),
3976 TYPE_UNSIGNED (integer_type_node)),
3977 TYPE_MODE (integer_type_node));
3979 preserve_temp_slots (to_rtx);
3982 return want_value ? to_rtx : NULL_RTX;
3985 /* Compute FROM and store the value in the rtx we got. */
3988 result = store_expr (from, to_rtx, want_value);
3989 preserve_temp_slots (result);
3992 return want_value ? result : NULL_RTX;
3995 /* Generate code for computing expression EXP,
3996 and storing the value into TARGET.
3997 TARGET may contain a QUEUED rtx.
3999 If WANT_VALUE & 1 is nonzero, return a copy of the value
4000 not in TARGET, so that we can be sure to use the proper
4001 value in a containing expression even if TARGET has something
4002 else stored in it. If possible, we copy the value through a pseudo
4003 and return that pseudo. Or, if the value is constant, we try to
4004 return the constant. In some cases, we return a pseudo
4005 copied *from* TARGET.
4007 If the mode is BLKmode then we may return TARGET itself.
4008 It turns out that in BLKmode it doesn't cause a problem,
4009 because C has no operators that could combine two different
4010 assignments into the same BLKmode object with different values
4011 with no sequence point.  Will other languages need this to be fixed?  */
4014 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4015 to catch quickly any cases where the caller uses the value
4016 and fails to set WANT_VALUE.
4018 If WANT_VALUE & 2 is set, this is a store into a call param on the
4019 stack, and block moves may need to be treated specially. */
4022 store_expr (tree exp, rtx target, int want_value)
4025 rtx alt_rtl = NULL_RTX;
4026 rtx mark = mark_queue ();
4027 int dont_return_target = 0;
4028 int dont_store_target = 0;
4030 if (VOID_TYPE_P (TREE_TYPE (exp)))
4032 /* C++ can generate ?: expressions with a throw expression in one
4033 branch and an rvalue in the other. Here, we resolve attempts to
4034 store the throw expression's nonexistent result. */
4037 expand_expr (exp, const0_rtx, VOIDmode, 0);
4040 if (TREE_CODE (exp) == COMPOUND_EXPR)
4042 /* Perform the first part of the compound expression, then assign from the second part.  */
4044 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4045 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4047 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4049 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4051 /* For conditional expression, get safe form of the target. Then
4052 test the condition, doing the appropriate assignment on either
4053 side. This avoids the creation of unnecessary temporaries.
4054 For non-BLKmode, it is more efficient not to do this. */
4056 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4059 target = protect_from_queue (target, 1);
4061 do_pending_stack_adjust ();
4063 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4064 start_cleanup_deferral ();
4065 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4066 end_cleanup_deferral ();
4068 emit_jump_insn (gen_jump (lab2));
4071 start_cleanup_deferral ();
4072 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4073 end_cleanup_deferral ();
4078 return want_value & 1 ? target : NULL_RTX;
4080 else if (queued_subexp_p (target))
4081 /* If target contains a postincrement, let's not risk
4082 using it as the place to generate the rhs. */
4084 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4086 /* Expand EXP into a new pseudo. */
4087 temp = gen_reg_rtx (GET_MODE (target));
4088 temp = expand_expr (exp, temp, GET_MODE (target),
4090 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4093 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4095 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4097 /* If target is volatile, ANSI requires accessing the value
4098 *from* the target, if it is accessed. So make that happen.
4099 In no case return the target itself. */
4100 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4101 dont_return_target = 1;
4103 else if ((want_value & 1) != 0
4104 && GET_CODE (target) == MEM
4105 && ! MEM_VOLATILE_P (target)
4106 && GET_MODE (target) != BLKmode)
4107 /* If target is in memory and caller wants value in a register instead,
4108 arrange that. Pass TARGET as target for expand_expr so that,
4109 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4110 We know expand_expr will not use the target in that case.
4111 Don't do this if TARGET is volatile because we are supposed
4112 to write it and then read it. */
4114 temp = expand_expr (exp, target, GET_MODE (target),
4115 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4116 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4118 /* If TEMP is already in the desired TARGET, only copy it from
4119 memory and don't store it there again. */
4121 || (rtx_equal_p (temp, target)
4122 && ! side_effects_p (temp) && ! side_effects_p (target)))
4123 dont_store_target = 1;
4124 temp = copy_to_reg (temp);
4126 dont_return_target = 1;
4128 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4129 /* If this is a scalar in a register that is stored in a wider mode
4130 than the declared mode, compute the result into its declared mode
4131 and then convert to the wider mode. Our value is the computed
4132 expression. */
4134 rtx inner_target = 0;
4136 /* If we don't want a value, we can do the conversion inside EXP,
4137 which will often result in some optimizations. Do the conversion
4138 in two steps: first change the signedness, if needed, then
4139 the extend. But don't do this if the type of EXP is a subtype
4140 of something else since then the conversion might involve
4141 more than just converting modes. */
4142 if ((want_value & 1) == 0
4143 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4144 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4146 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4147 != SUBREG_PROMOTED_UNSIGNED_P (target))
4148 exp = convert
4149 (lang_hooks.types.signed_or_unsigned_type
4150 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4152 exp = convert (lang_hooks.types.type_for_mode
4153 (GET_MODE (SUBREG_REG (target)),
4154 SUBREG_PROMOTED_UNSIGNED_P (target)),
4155 exp);
4157 inner_target = SUBREG_REG (target);
4160 temp = expand_expr (exp, inner_target, VOIDmode,
4161 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4163 /* If TEMP is a MEM and we want a result value, make the access
4164 now so it gets done only once. Strictly speaking, this is
4165 only necessary if the MEM is volatile, or if the address
4166 overlaps TARGET. But not performing the load twice also
4167 reduces the amount of rtl we generate and then have to CSE. */
4168 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4169 temp = copy_to_reg (temp);
4171 /* If TEMP is a VOIDmode constant, use convert_modes to make
4172 sure that we properly convert it. */
4173 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4175 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4176 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4177 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4178 GET_MODE (target), temp,
4179 SUBREG_PROMOTED_UNSIGNED_P (target));
4182 convert_move (SUBREG_REG (target), temp,
4183 SUBREG_PROMOTED_UNSIGNED_P (target));
4185 /* If we promoted a constant, change the mode back down to match
4186 target. Otherwise, the caller might get confused by a result whose
4187 mode is larger than expected. */
4189 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4191 if (GET_MODE (temp) != VOIDmode)
4193 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4194 SUBREG_PROMOTED_VAR_P (temp) = 1;
4195 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4196 SUBREG_PROMOTED_UNSIGNED_P (target));
4199 temp = convert_modes (GET_MODE (target),
4200 GET_MODE (SUBREG_REG (target)),
4201 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4204 return want_value & 1 ? temp : NULL_RTX;
4208 temp = expand_expr_real (exp, target, GET_MODE (target),
4209 (want_value & 2
4210 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4211 &alt_rtl);
4212 /* Return TARGET if it's a specified hardware register.
4213 If TARGET is a volatile mem ref, either return TARGET
4214 or return a reg copied *from* TARGET; ANSI requires this.
4216 Otherwise, if TEMP is not TARGET, return TEMP
4217 if it is constant (for efficiency),
4218 or if we really want the correct value. */
4219 if (!(target && REG_P (target)
4220 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4221 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4222 && ! rtx_equal_p (temp, target)
4223 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4224 dont_return_target = 1;
4227 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4228 the same as that of TARGET, adjust the constant. This is needed, for
4229 example, in case it is a CONST_DOUBLE and we want only a word-sized
4230 value. */
4231 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4232 && TREE_CODE (exp) != ERROR_MARK
4233 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4234 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4235 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4237 /* If value was not generated in the target, store it there.
4238 Convert the value to TARGET's type first if necessary and emit the
4239 pending increments that have been queued when expanding EXP.
4240 Note that we cannot emit the whole queue blindly because this will
4241 effectively disable the POST_INC optimization later.
4243 If TEMP and TARGET compare equal according to rtx_equal_p, but
4244 one or both of them are volatile memory refs, we have to distinguish
4246 - expand_expr has used TARGET. In this case, we must not generate
4247 another copy. This can be detected by TARGET being equal according
4249 - expand_expr has not used TARGET - that means that the source just
4250 happens to have the same RTX form. Since temp will have been created
4251 by expand_expr, it will compare unequal according to ==.
4252 We must generate a copy in this case, to reach the correct number
4253 of volatile memory references. */
4255 if ((! rtx_equal_p (temp, target)
4256 || (temp != target && (side_effects_p (temp)
4257 || side_effects_p (target))))
4258 && TREE_CODE (exp) != ERROR_MARK
4259 && ! dont_store_target
4260 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4261 but TARGET is not a valid memory reference, TEMP will differ
4262 from TARGET although it is really the same location. */
4263 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4264 /* If there's nothing to copy, don't bother. Don't call expr_size
4265 unless necessary, because some front ends' (C++) expr_size hook
4266 aborts on objects that are not supposed to be bit-copied or
4267 can't be. */
4268 && expr_size (exp) != const0_rtx)
4270 emit_insns_enqueued_after_mark (mark);
4271 target = protect_from_queue (target, 1);
4272 temp = protect_from_queue (temp, 0);
4273 if (GET_MODE (temp) != GET_MODE (target)
4274 && GET_MODE (temp) != VOIDmode)
4276 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4277 if (dont_return_target)
4279 /* In this case, we will return TEMP,
4280 so make sure it has the proper mode.
4281 But don't forget to store the value into TARGET. */
4282 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4283 emit_move_insn (target, temp);
4286 convert_move (target, temp, unsignedp);
4289 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4291 /* Handle copying a string constant into an array. The string
4292 constant may be shorter than the array. So copy just the string's
4293 actual length, and clear the rest. First get the size of the data
4294 type of the string, which is actually the size of the target. */
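/* Editor's example (assumed C input, not from the source): for

       char buf[8] = "abc";

   EXP is the STRING_CST "abc" with TREE_STRING_LENGTH 4 (three
   characters plus the terminating NUL), while expr_size (exp) is the
   size of the target, 8. The code below therefore copies 4 bytes and
   clears the remaining 4. */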
4295 rtx size = expr_size (exp);
4297 if (GET_CODE (size) == CONST_INT
4298 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4299 emit_block_move (target, temp, size,
4300 (want_value & 2
4301 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4304 /* Compute the size of the data to copy from the string. */
4306 = size_binop (MIN_EXPR,
4307 make_tree (sizetype, size),
4308 size_int (TREE_STRING_LENGTH (exp)));
4310 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4311 (want_value & 2
4312 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4315 /* Copy that much. */
4316 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4317 TYPE_UNSIGNED (sizetype));
4318 emit_block_move (target, temp, copy_size_rtx,
4319 (want_value & 2
4320 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4322 /* Figure out how much is left in TARGET that we have to clear.
4323 Do all calculations in ptr_mode. */
4324 if (GET_CODE (copy_size_rtx) == CONST_INT)
4326 size = plus_constant (size, -INTVAL (copy_size_rtx));
4327 target = adjust_address (target, BLKmode,
4328 INTVAL (copy_size_rtx));
4332 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4333 copy_size_rtx, NULL_RTX, 0,
4336 #ifdef POINTERS_EXTEND_UNSIGNED
4337 if (GET_MODE (copy_size_rtx) != Pmode)
4338 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4339 TYPE_UNSIGNED (sizetype));
4342 target = offset_address (target, copy_size_rtx,
4343 highest_pow2_factor (copy_size));
4344 label = gen_label_rtx ();
4345 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4346 GET_MODE (size), 0, label);
4349 if (size != const0_rtx)
4350 clear_storage (target, size);
4356 /* Handle calls that return values in multiple non-contiguous locations.
4357 The Irix 6 ABI has examples of this. */
4358 else if (GET_CODE (target) == PARALLEL)
4359 emit_group_load (target, temp, TREE_TYPE (exp),
4360 int_size_in_bytes (TREE_TYPE (exp)));
4361 else if (GET_MODE (temp) == BLKmode)
4362 emit_block_move (target, temp, expr_size (exp),
4363 (want_value & 2
4364 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4367 temp = force_operand (temp, target);
4369 emit_move_insn (target, temp);
4373 /* If we don't want a value, return NULL_RTX. */
4374 if ((want_value & 1) == 0)
4377 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4378 ??? The latter test doesn't seem to make sense. */
4379 else if (dont_return_target && GET_CODE (temp) != MEM)
4382 /* Return TARGET itself if it is a hard register. */
4383 else if ((want_value & 1) != 0
4384 && GET_MODE (target) != BLKmode
4385 && ! (REG_P (target)
4386 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4387 return copy_to_reg (target);
4393 /* Examine CTOR. Count how many scalar fields are set to nonzero
4394 values and place that count in *P_NZ_ELTS. Count how many scalar fields
4395 are set to non-constant values and place that count in *P_NC_ELTS. */
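/* Editor's example (hypothetical initializer, not from the source):
   for { 1, 0, 2.0, n } where n is a variable, initializer_zerop holds
   only for the 0 element and initializer_constant_valid_p fails only
   for n, so the function reports *P_NZ_ELTS == 3 and *P_NC_ELTS == 1. */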
4398 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4399 HOST_WIDE_INT *p_nc_elts)
4401 HOST_WIDE_INT nz_elts, nc_elts;
4407 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4409 tree value = TREE_VALUE (list);
4410 tree purpose = TREE_PURPOSE (list);
4414 if (TREE_CODE (purpose) == RANGE_EXPR)
4416 tree lo_index = TREE_OPERAND (purpose, 0);
4417 tree hi_index = TREE_OPERAND (purpose, 1);
4419 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4420 mult = (tree_low_cst (hi_index, 1)
4421 - tree_low_cst (lo_index, 1) + 1);
4424 switch (TREE_CODE (value))
4428 HOST_WIDE_INT nz = 0, nc = 0;
4429 categorize_ctor_elements_1 (value, &nz, &nc);
4430 nz_elts += mult * nz;
4431 nc_elts += mult * nc;
4437 if (!initializer_zerop (value))
4441 if (!initializer_zerop (TREE_REALPART (value)))
4443 if (!initializer_zerop (TREE_IMAGPART (value)))
4449 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4450 if (!initializer_zerop (TREE_VALUE (v)))
4457 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4463 *p_nz_elts += nz_elts;
4464 *p_nc_elts += nc_elts;
4468 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4469 HOST_WIDE_INT *p_nc_elts)
4473 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4476 /* Count the number of scalars in TYPE. Return -1 on overflow or
4477 if the count cannot be determined, e.g. for a variable-sized type. */
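/* Editor's example (hypothetical types): for struct { int a; int b[4]; }
   the count is 1 + 4 == 5; for an array whose bound is not a
   compile-time constant, no count can be computed and -1 is returned. */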
4480 count_type_elements (tree type)
4482 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4483 switch (TREE_CODE (type))
4487 tree telts = array_type_nelts (type);
4488 if (telts && host_integerp (telts, 1))
4490 HOST_WIDE_INT n = tree_low_cst (telts, 1);
4491 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4502 HOST_WIDE_INT n = 0, t;
4505 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4506 if (TREE_CODE (f) == FIELD_DECL)
4508 t = count_type_elements (TREE_TYPE (f));
4518 case QUAL_UNION_TYPE:
4520 /* Ho hum. How in the world do we guess here? Clearly it isn't
4521 right to count the fields. Guess based on the number of words. */
4522 HOST_WIDE_INT n = int_size_in_bytes (type);
4525 return n / UNITS_PER_WORD;
4532 /* ??? This is broken. We should encode the vector width in the tree. */
4533 return GET_MODE_NUNITS (TYPE_MODE (type));
4542 case REFERENCE_TYPE:
4556 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
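/* Editor's example: int v[8] = { 0, 0, 5, 0, 0, 0, 0, 0 } has
   nz_elts == 1 and elts == 8, and 1 < 8 / 4, so the predicate returns
   1; with two nonzero elements it would return 0. */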
4559 mostly_zeros_p (tree exp)
4561 if (TREE_CODE (exp) == CONSTRUCTOR)
4564 HOST_WIDE_INT nz_elts, nc_elts, elts;
4566 /* If there are no ranges of true bits, it is all zero. */
4567 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4568 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4570 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4571 elts = count_type_elements (TREE_TYPE (exp));
4573 return nz_elts < elts / 4;
4576 return initializer_zerop (exp);
4579 /* Helper function for store_constructor.
4580 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4581 TYPE is the type of the CONSTRUCTOR, not the element type.
4582 CLEARED is as for store_constructor.
4583 ALIAS_SET is the alias set to use for any stores.
4585 This provides a recursive shortcut back to store_constructor when it isn't
4586 necessary to go through store_field. This is so that we can pass through
4587 the cleared field to let store_constructor know that we may not have to
4588 clear a substructure if the outer structure has already been cleared. */
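/* Editor's note: e.g. for struct { struct { int x, y; } in; } v =
   { { 0, 0 } }, the outer object is cleared once and the recursive
   call for the inner constructor sees CLEARED set, so the all-zero
   inner fields are not stored a second time. */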
4591 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4592 HOST_WIDE_INT bitpos, enum machine_mode mode,
4593 tree exp, tree type, int cleared, int alias_set)
4595 if (TREE_CODE (exp) == CONSTRUCTOR
4596 /* We can only call store_constructor recursively if the size and
4597 bit position are on a byte boundary. */
4598 && bitpos % BITS_PER_UNIT == 0
4599 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4600 /* If we have a nonzero bitpos for a register target, then we just
4601 let store_field do the bitfield handling. This is unlikely to
4602 generate unnecessary clear instructions anyway. */
4603 && (bitpos == 0 || GET_CODE (target) == MEM))
4605 if (GET_CODE (target) == MEM)
4607 = adjust_address (target,
4608 GET_MODE (target) == BLKmode
4610 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4611 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4614 /* Update the alias set, if required. */
4615 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4616 && MEM_ALIAS_SET (target) != 0)
4618 target = copy_rtx (target);
4619 set_mem_alias_set (target, alias_set);
4622 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4625 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4629 /* Store the value of constructor EXP into the rtx TARGET.
4630 TARGET is either a REG or a MEM; we know it cannot conflict, since
4631 safe_from_p has been called.
4632 CLEARED is true if TARGET is known to have been zeroed.
4633 SIZE is the number of bytes of TARGET we are allowed to modify: this
4634 may not be the same as the size of EXP if we are assigning to a field
4635 which has been packed to exclude padding bits. */
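/* Editor's example (assumed input, not from the source): for a
   structure of eight ints initialized to { 0, 0, 7, 0, 0, 0, 0, 0 },
   mostly_zeros_p holds (one nonzero field out of eight), so the code
   below clears the whole object first and then stores only the single
   nonzero field. */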
4638 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4640 tree type = TREE_TYPE (exp);
4641 #ifdef WORD_REGISTER_OPERATIONS
4642 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4645 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4646 || TREE_CODE (type) == QUAL_UNION_TYPE)
4650 /* If size is zero or the target is already cleared, do nothing. */
4651 if (size == 0 || cleared)
4653 /* We either clear the aggregate or indicate the value is dead. */
4654 else if ((TREE_CODE (type) == UNION_TYPE
4655 || TREE_CODE (type) == QUAL_UNION_TYPE)
4656 && ! CONSTRUCTOR_ELTS (exp))
4657 /* If the constructor is empty, clear the union. */
4659 clear_storage (target, expr_size (exp));
4663 /* If we are building a static constructor into a register,
4664 set the initial value as zero so we can fold the value into
4665 a constant. But if more than one register is involved,
4666 this probably loses. */
4667 else if (REG_P (target) && TREE_STATIC (exp)
4668 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4670 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4674 /* If the constructor has fewer fields than the structure
4675 or if we are initializing the structure to mostly zeros,
4676 clear the whole structure first. Don't do this if TARGET is a
4677 register whose mode size isn't equal to SIZE since clear_storage
4678 can't handle this case. */
4680 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4681 || mostly_zeros_p (exp))
4683 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4686 rtx xtarget = target;
4688 if (readonly_fields_p (type))
4690 xtarget = copy_rtx (xtarget);
4691 RTX_UNCHANGING_P (xtarget) = 1;
4694 clear_storage (xtarget, GEN_INT (size));
4699 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4701 /* Store each element of the constructor into
4702 the corresponding field of TARGET. */
4704 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4706 tree field = TREE_PURPOSE (elt);
4707 tree value = TREE_VALUE (elt);
4708 enum machine_mode mode;
4709 HOST_WIDE_INT bitsize;
4710 HOST_WIDE_INT bitpos = 0;
4712 rtx to_rtx = target;
4714 /* Just ignore missing fields.
4715 We cleared the whole structure, above,
4716 if any fields are missing. */
4720 if (cleared && initializer_zerop (value))
4723 if (host_integerp (DECL_SIZE (field), 1))
4724 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4728 mode = DECL_MODE (field);
4729 if (DECL_BIT_FIELD (field))
4732 offset = DECL_FIELD_OFFSET (field);
4733 if (host_integerp (offset, 0)
4734 && host_integerp (bit_position (field), 0))
4736 bitpos = int_bit_position (field);
4740 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4747 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4748 make_tree (TREE_TYPE (exp),
4751 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4752 if (GET_CODE (to_rtx) != MEM)
4755 #ifdef POINTERS_EXTEND_UNSIGNED
4756 if (GET_MODE (offset_rtx) != Pmode)
4757 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4759 if (GET_MODE (offset_rtx) != ptr_mode)
4760 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4763 to_rtx = offset_address (to_rtx, offset_rtx,
4764 highest_pow2_factor (offset));
4767 if (TREE_READONLY (field))
4769 if (GET_CODE (to_rtx) == MEM)
4770 to_rtx = copy_rtx (to_rtx);
4772 RTX_UNCHANGING_P (to_rtx) = 1;
4775 #ifdef WORD_REGISTER_OPERATIONS
4776 /* If this initializes a field that is smaller than a word, at the
4777 start of a word, try to widen it to a full word.
4778 This special case allows us to output C++ member function
4779 initializations in a form that the optimizers can understand. */
4781 && bitsize < BITS_PER_WORD
4782 && bitpos % BITS_PER_WORD == 0
4783 && GET_MODE_CLASS (mode) == MODE_INT
4784 && TREE_CODE (value) == INTEGER_CST
4786 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4788 tree type = TREE_TYPE (value);
4790 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4792 type = lang_hooks.types.type_for_size
4793 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4794 value = convert (type, value);
4797 if (BYTES_BIG_ENDIAN)
4799 = fold (build (LSHIFT_EXPR, type, value,
4800 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4801 bitsize = BITS_PER_WORD;
4806 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4807 && DECL_NONADDRESSABLE_P (field))
4809 to_rtx = copy_rtx (to_rtx);
4810 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4813 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4814 value, type, cleared,
4815 get_alias_set (TREE_TYPE (field)));
4818 else if (TREE_CODE (type) == ARRAY_TYPE
4819 || TREE_CODE (type) == VECTOR_TYPE)
4825 tree elttype = TREE_TYPE (type);
4827 HOST_WIDE_INT minelt = 0;
4828 HOST_WIDE_INT maxelt = 0;
4832 unsigned n_elts = 0;
4834 if (TREE_CODE (type) == ARRAY_TYPE)
4835 domain = TYPE_DOMAIN (type);
4837 /* Vectors do not have domains; look up the domain of
4838 the array embedded in the debug representation type.
4839 FIXME Would probably be more efficient to treat vectors
4840 separately from arrays. */
4842 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4843 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4844 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4846 enum machine_mode mode = GET_MODE (target);
4848 icode = (int) vec_init_optab->handlers[mode].insn_code;
4849 if (icode != CODE_FOR_nothing)
4853 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4854 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4855 vector = alloca (n_elts * sizeof (rtx));
4856 for (i = 0; i < n_elts; i++)
4857 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4862 const_bounds_p = (TYPE_MIN_VALUE (domain)
4863 && TYPE_MAX_VALUE (domain)
4864 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4865 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4867 /* If we have constant bounds for the range of the type, get them. */
4870 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4871 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4874 /* If the constructor has fewer elements than the array,
4875 clear the whole array first. Similarly if this is
4876 a static constructor of a non-BLKmode object. */
4877 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4881 HOST_WIDE_INT count = 0, zero_count = 0;
4882 need_to_clear = ! const_bounds_p;
4884 /* This loop is a more accurate version of the loop in
4885 mostly_zeros_p (it handles RANGE_EXPR in an index).
4886 It is also needed to check for missing elements. */
4887 for (elt = CONSTRUCTOR_ELTS (exp);
4888 elt != NULL_TREE && ! need_to_clear;
4889 elt = TREE_CHAIN (elt))
4891 tree index = TREE_PURPOSE (elt);
4892 HOST_WIDE_INT this_node_count;
4894 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4896 tree lo_index = TREE_OPERAND (index, 0);
4897 tree hi_index = TREE_OPERAND (index, 1);
4899 if (! host_integerp (lo_index, 1)
4900 || ! host_integerp (hi_index, 1))
4906 this_node_count = (tree_low_cst (hi_index, 1)
4907 - tree_low_cst (lo_index, 1) + 1);
4910 this_node_count = 1;
4912 count += this_node_count;
4913 if (mostly_zeros_p (TREE_VALUE (elt)))
4914 zero_count += this_node_count;
4917 /* Clear the entire array first if there are any missing elements,
4918 or if the incidence of zero elements is >= 75%. */
4920 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4924 if (need_to_clear && size > 0 && !vector)
4929 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4931 clear_storage (target, GEN_INT (size));
4935 else if (REG_P (target))
4936 /* Inform later passes that the old value is dead. */
4937 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4939 /* Store each element of the constructor into
4940 the corresponding element of TARGET, determined
4941 by counting the elements. */
4942 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4944 elt = TREE_CHAIN (elt), i++)
4946 enum machine_mode mode;
4947 HOST_WIDE_INT bitsize;
4948 HOST_WIDE_INT bitpos;
4950 tree value = TREE_VALUE (elt);
4951 tree index = TREE_PURPOSE (elt);
4952 rtx xtarget = target;
4954 if (cleared && initializer_zerop (value))
4957 unsignedp = TYPE_UNSIGNED (elttype);
4958 mode = TYPE_MODE (elttype);
4959 if (mode == BLKmode)
4960 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4961 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4964 bitsize = GET_MODE_BITSIZE (mode);
4966 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4968 tree lo_index = TREE_OPERAND (index, 0);
4969 tree hi_index = TREE_OPERAND (index, 1);
4970 rtx index_r, pos_rtx;
4971 HOST_WIDE_INT lo, hi, count;
4977 /* If the range is constant and "small", unroll the loop. */
4979 && host_integerp (lo_index, 0)
4980 && host_integerp (hi_index, 0)
4981 && (lo = tree_low_cst (lo_index, 0),
4982 hi = tree_low_cst (hi_index, 0),
4983 count = hi - lo + 1,
4984 (GET_CODE (target) != MEM
4986 || (host_integerp (TYPE_SIZE (elttype), 1)
4987 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4990 lo -= minelt; hi -= minelt;
4991 for (; lo <= hi; lo++)
4993 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4995 if (GET_CODE (target) == MEM
4996 && !MEM_KEEP_ALIAS_SET_P (target)
4997 && TREE_CODE (type) == ARRAY_TYPE
4998 && TYPE_NONALIASED_COMPONENT (type))
5000 target = copy_rtx (target);
5001 MEM_KEEP_ALIAS_SET_P (target) = 1;
5004 store_constructor_field
5005 (target, bitsize, bitpos, mode, value, type, cleared,
5006 get_alias_set (elttype));
5011 rtx loop_start = gen_label_rtx ();
5012 rtx loop_end = gen_label_rtx ();
5015 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5016 unsignedp = TYPE_UNSIGNED (domain);
5018 index = build_decl (VAR_DECL, NULL_TREE, domain);
5021 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5023 SET_DECL_RTL (index, index_r);
5024 if (TREE_CODE (value) == SAVE_EXPR
5025 && SAVE_EXPR_RTL (value) == 0)
5027 /* Make sure value gets expanded once before the
5028 loop. */
5029 expand_expr (value, const0_rtx, VOIDmode, 0);
5032 store_expr (lo_index, index_r, 0);
5034 /* Build the head of the loop. */
5035 do_pending_stack_adjust ();
5037 emit_label (loop_start);
5039 /* Assign value to element index. */
5041 = convert (ssizetype,
5042 fold (build (MINUS_EXPR, TREE_TYPE (index),
5043 index, TYPE_MIN_VALUE (domain))));
5044 position = size_binop (MULT_EXPR, position,
5046 TYPE_SIZE_UNIT (elttype)));
5048 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5049 xtarget = offset_address (target, pos_rtx,
5050 highest_pow2_factor (position));
5051 xtarget = adjust_address (xtarget, mode, 0);
5052 if (TREE_CODE (value) == CONSTRUCTOR)
5053 store_constructor (value, xtarget, cleared,
5054 bitsize / BITS_PER_UNIT);
5056 store_expr (value, xtarget, 0);
5058 /* Generate a conditional jump to exit the loop. */
5059 exit_cond = build (LT_EXPR, integer_type_node,
5061 jumpif (exit_cond, loop_end);
5063 /* Update the loop counter, and jump to the head of
5064 the loop. */
5065 expand_increment (build (PREINCREMENT_EXPR,
5067 index, integer_one_node), 0, 0);
5068 emit_jump (loop_start);
5070 /* Build the end of the loop. */
5071 emit_label (loop_end);
5074 else if ((index != 0 && ! host_integerp (index, 0))
5075 || ! host_integerp (TYPE_SIZE (elttype), 1))
5083 index = ssize_int (i);
5086 index = convert (ssizetype,
5087 fold (build (MINUS_EXPR, index,
5088 TYPE_MIN_VALUE (domain))));
5090 position = size_binop (MULT_EXPR, index,
5092 TYPE_SIZE_UNIT (elttype)));
5093 xtarget = offset_address (target,
5094 expand_expr (position, 0, VOIDmode, 0),
5095 highest_pow2_factor (position));
5096 xtarget = adjust_address (xtarget, mode, 0);
5097 store_expr (value, xtarget, 0);
5104 pos = tree_low_cst (index, 0) - minelt;
5107 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5112 bitpos = ((tree_low_cst (index, 0) - minelt)
5113 * tree_low_cst (TYPE_SIZE (elttype), 1));
5115 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5117 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5118 && TREE_CODE (type) == ARRAY_TYPE
5119 && TYPE_NONALIASED_COMPONENT (type))
5121 target = copy_rtx (target);
5122 MEM_KEEP_ALIAS_SET_P (target) = 1;
5124 store_constructor_field (target, bitsize, bitpos, mode, value,
5125 type, cleared, get_alias_set (elttype));
5130 emit_insn (GEN_FCN (icode) (target,
5131 gen_rtx_PARALLEL (GET_MODE (target),
5132 gen_rtvec_v (n_elts, vector))));
5136 /* Set constructor assignments. */
5137 else if (TREE_CODE (type) == SET_TYPE)
5139 tree elt = CONSTRUCTOR_ELTS (exp);
5140 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5141 tree domain = TYPE_DOMAIN (type);
5142 tree domain_min, domain_max, bitlength;
5144 /* The default implementation strategy is to extract the constant
5145 parts of the constructor, use that to initialize the target,
5146 and then "or" in whatever non-constant ranges we need in addition.
5148 If a large set is all zero or all ones, it is
5149 probably better to set it using memset (if available) or bzero.
5150 Also, if a large set has just a single range, it may be
5151 better to first clear the whole set (using bzero/memset)
5152 and then set the bits we want. */
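/* Editor's illustration (hypothetical Pascal-style set): for a set
   over 0..63 initialized to [3, 10..20], the constant bits fit in two
   words and are copied directly by the word-building loop below; an
   all-zero set would instead be cleared wholesale, per the strategy
   above. */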
5154 /* Check for all zeros. */
5155 if (elt == NULL_TREE && size > 0)
5158 clear_storage (target, GEN_INT (size));
5162 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5163 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5164 bitlength = size_binop (PLUS_EXPR,
5165 size_diffop (domain_max, domain_min),
5168 nbits = tree_low_cst (bitlength, 1);
5170 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5171 are "complicated" (more than one range), initialize (the
5172 constant parts) by copying from a constant. */
5173 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5174 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5176 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5177 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5178 char *bit_buffer = alloca (nbits);
5179 HOST_WIDE_INT word = 0;
5180 unsigned int bit_pos = 0;
5181 unsigned int ibit = 0;
5182 unsigned int offset = 0; /* In bytes from beginning of set. */
5184 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5187 if (bit_buffer[ibit])
5189 if (BYTES_BIG_ENDIAN)
5190 word |= (1 << (set_word_size - 1 - bit_pos));
5192 word |= 1 << bit_pos;
5196 if (bit_pos >= set_word_size || ibit == nbits)
5198 if (word != 0 || ! cleared)
5200 rtx datum = gen_int_mode (word, mode);
5203 /* The assumption here is that it is safe to use
5204 XEXP if the set is multi-word, but not if
5205 it's single-word. */
5206 if (GET_CODE (target) == MEM)
5207 to_rtx = adjust_address (target, mode, offset);
5208 else if (offset == 0)
5212 emit_move_insn (to_rtx, datum);
5219 offset += set_word_size / BITS_PER_UNIT;
5224 /* Don't bother clearing storage if the set is all ones. */
5225 if (TREE_CHAIN (elt) != NULL_TREE
5226 || (TREE_PURPOSE (elt) == NULL_TREE
5228 : ( ! host_integerp (TREE_VALUE (elt), 0)
5229 || ! host_integerp (TREE_PURPOSE (elt), 0)
5230 || (tree_low_cst (TREE_VALUE (elt), 0)
5231 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5232 != (HOST_WIDE_INT) nbits))))
5233 clear_storage (target, expr_size (exp));
5235 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5237 /* Start of range of element or NULL. */
5238 tree startbit = TREE_PURPOSE (elt);
5239 /* End of range of element, or element value. */
5240 tree endbit = TREE_VALUE (elt);
5241 HOST_WIDE_INT startb, endb;
5242 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5244 bitlength_rtx = expand_expr (bitlength,
5245 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5247 /* Handle non-range tuple element like [ expr ]. */
5248 if (startbit == NULL_TREE)
5250 startbit = save_expr (endbit);
5254 startbit = convert (sizetype, startbit);
5255 endbit = convert (sizetype, endbit);
5256 if (! integer_zerop (domain_min))
5258 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5259 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5261 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5262 EXPAND_CONST_ADDRESS);
5263 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5264 EXPAND_CONST_ADDRESS);
5270 ((build_qualified_type (lang_hooks.types.type_for_mode
5271 (GET_MODE (target), 0),
5274 emit_move_insn (targetx, target);
5277 else if (GET_CODE (target) == MEM)
5282 /* Optimization: If startbit and endbit are constants divisible
5283 by BITS_PER_UNIT, call memset instead. */
5284 if (TARGET_MEM_FUNCTIONS
5285 && TREE_CODE (startbit) == INTEGER_CST
5286 && TREE_CODE (endbit) == INTEGER_CST
5287 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5288 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5290 emit_library_call (memset_libfunc, LCT_NORMAL,
5292 plus_constant (XEXP (targetx, 0),
5293 startb / BITS_PER_UNIT),
5295 constm1_rtx, TYPE_MODE (integer_type_node),
5296 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5297 TYPE_MODE (sizetype));
5300 emit_library_call (setbits_libfunc, LCT_NORMAL,
5301 VOIDmode, 4, XEXP (targetx, 0),
5302 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5303 startbit_rtx, TYPE_MODE (sizetype),
5304 endbit_rtx, TYPE_MODE (sizetype));
5307 emit_move_insn (target, targetx);
5315 /* Store the value of EXP (an expression tree)
5316 into a subfield of TARGET which has mode MODE and occupies
5317 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5318 If MODE is VOIDmode, it means that we are storing into a bit-field.
5320 If VALUE_MODE is VOIDmode, return nothing in particular.
5321 UNSIGNEDP is not used in this case.
5323 Otherwise, return an rtx for the value stored. This rtx
5324 has mode VALUE_MODE if that is convenient to do.
5325 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5327 TYPE is the type of the underlying object,
5329 ALIAS_SET is the alias set for the destination. This value will
5330 (in general) be different from that for TARGET, since TARGET is a
5331 reference to the containing structure. */
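/* Editor's example (assumed C input): for struct { unsigned f : 5; } s;
   the assignment s.f = v reaches this function with BITSIZE == 5,
   BITPOS the field's bit offset, and MODE == VOIDmode, so the value is
   inserted via store_bit_field rather than an ordinary move. */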
5334 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5335 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5336 int unsignedp, tree type, int alias_set)
5338 HOST_WIDE_INT width_mask = 0;
5340 if (TREE_CODE (exp) == ERROR_MARK)
5343 /* If we have nothing to store, do nothing unless the expression has
5344 side effects. */
5345 if (bitsize == 0)
5346 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5347 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5348 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5350 /* If we are storing into an unaligned field of an aligned union that is
5351 in a register, we may have the mode of TARGET being an integer mode but
5352 MODE == BLKmode. In that case, get an aligned object whose size and
5353 alignment are the same as TARGET and store TARGET into it (we can avoid
5354 the store if the field being stored is the entire width of TARGET). Then
5355 call ourselves recursively to store the field into a BLKmode version of
5356 that object. Finally, load from the object into TARGET. This is not
5357 very efficient in general, but should only be slightly more expensive
5358 than the otherwise-required unaligned accesses. Perhaps this can be
5359 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5360 twice, once with emit_move_insn and once via store_field. */
5363 && (REG_P (target) || GET_CODE (target) == SUBREG))
5365 rtx object = assign_temp (type, 0, 1, 1);
5366 rtx blk_object = adjust_address (object, BLKmode, 0);
5368 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5369 emit_move_insn (object, target);
5371 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5374 emit_move_insn (target, object);
5376 /* We want to return the BLKmode version of the data. */
5380 if (GET_CODE (target) == CONCAT)
5382 /* We're storing into a struct containing a single __complex. */
5386 return store_expr (exp, target, value_mode != VOIDmode);
5389 /* If the structure is in a register or if the component
5390 is a bit field, we cannot use addressing to access it.
5391 Use bit-field techniques or SUBREG to store in it. */
5393 if (mode == VOIDmode
5394 || (mode != BLKmode && ! direct_store[(int) mode]
5395 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5396 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5398 || GET_CODE (target) == SUBREG
5399 /* If the field isn't aligned enough to store as an ordinary memref,
5400 store it as a bit field. */
5402 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5403 || bitpos % GET_MODE_ALIGNMENT (mode))
5404 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5405 || (bitpos % BITS_PER_UNIT != 0)))
5406 /* If the RHS and field are a constant size and the size of the
5407 RHS isn't the same size as the bitfield, we must use bitfield
5408 operations. */
5409 || (bitsize >= 0
5410 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5411 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5413 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5415 /* If BITSIZE is narrower than the size of the type of EXP
5416 we will be narrowing TEMP. Normally, what's wanted are the
5417 low-order bits. However, if EXP's type is a record and this is
5418 a big-endian machine, we want the upper BITSIZE bits. */
5419 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5420 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5421 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5422 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5423 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5427 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5428 MODE. */
5429 if (mode != VOIDmode && mode != BLKmode
5430 && mode != TYPE_MODE (TREE_TYPE (exp)))
5431 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5433 /* If the modes of TARGET and TEMP are both BLKmode, both
5434 must be in memory and BITPOS must be aligned on a byte
5435 boundary. If so, we simply do a block copy. */
5436 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5438 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5439 || bitpos % BITS_PER_UNIT != 0)
5442 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5443 emit_block_move (target, temp,
5444 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5448 return value_mode == VOIDmode ? const0_rtx : target;
5451 /* Store the value in the bitfield. */
5452 store_bit_field (target, bitsize, bitpos, mode, temp,
5453 int_size_in_bytes (type));
5455 if (value_mode != VOIDmode)
5457 /* The caller wants an rtx for the value.
5458 If possible, avoid refetching from the bitfield itself. */
5460 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5463 enum machine_mode tmode;
5465 tmode = GET_MODE (temp);
5466 if (tmode == VOIDmode)
5470 return expand_and (tmode, temp,
5471 gen_int_mode (width_mask, tmode),
5474 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5475 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5476 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5479 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5480 NULL_RTX, value_mode, VOIDmode,
5481 int_size_in_bytes (type));
5487 rtx addr = XEXP (target, 0);
5488 rtx to_rtx = target;
5490 /* If a value is wanted, it must be the lhs;
5491 so make the address stable for multiple use. */
5493 if (value_mode != VOIDmode && !REG_P (addr)
5494 && ! CONSTANT_ADDRESS_P (addr)
5495 /* A frame-pointer reference is already stable. */
5496 && ! (GET_CODE (addr) == PLUS
5497 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5498 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5499 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5500 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5502 /* Now build a reference to just the desired component. */
5504 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5506 if (to_rtx == target)
5507 to_rtx = copy_rtx (to_rtx);
5509 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5510 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5511 set_mem_alias_set (to_rtx, alias_set);
5513 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5517 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5518 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5519 codes and find the ultimate containing object, which we return.
5521 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5522 bit position, and *PUNSIGNEDP to the signedness of the field.
5523 If the position of the field is variable, we store a tree
5524 giving the variable offset (in units) in *POFFSET.
5525 This offset is in addition to the bit position.
5526 If the position is not variable, we store 0 in *POFFSET.
5528 If any of the extraction expressions is volatile,
5529 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5531 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5532 is a mode that can be used to access the field. In that case, *PBITSIZE
5533 is redundant.
5535 If the field describes a variable-sized object, *PMODE is set to
5536 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5537 this case, but the address of the object can be found. */
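/* Editor's example (hypothetical C lvalue): for a[i].b the loop below
   peels the COMPONENT_REF and ARRAY_REF; b's size and constant bit
   offset end up in *PBITSIZE and *PBITPOS, the variable byte offset
   i * sizeof (a[0]) comes back as a tree in *POFFSET, and the ultimate
   containing object a is returned. */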
5540 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5541 HOST_WIDE_INT *pbitpos, tree *poffset,
5542 enum machine_mode *pmode, int *punsignedp,
5546 enum machine_mode mode = VOIDmode;
5547 tree offset = size_zero_node;
5548 tree bit_offset = bitsize_zero_node;
5551 /* First get the mode, signedness, and size. We do this from just the
5552 outermost expression. */
5553 if (TREE_CODE (exp) == COMPONENT_REF)
5555 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5556 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5557 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5559 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5561 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5563 size_tree = TREE_OPERAND (exp, 1);
5564 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5568 mode = TYPE_MODE (TREE_TYPE (exp));
5569 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5571 if (mode == BLKmode)
5572 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5574 *pbitsize = GET_MODE_BITSIZE (mode);
5579 if (! host_integerp (size_tree, 1))
5580 mode = BLKmode, *pbitsize = -1;
5582 *pbitsize = tree_low_cst (size_tree, 1);
5585 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5586 and find the ultimate containing object. */
5589 if (TREE_CODE (exp) == BIT_FIELD_REF)
5590 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5591 else if (TREE_CODE (exp) == COMPONENT_REF)
5593 tree field = TREE_OPERAND (exp, 1);
5594 tree this_offset = component_ref_field_offset (exp);
5596 /* If this field hasn't been filled in yet, don't go
5597 past it. This should only happen when folding expressions
5598 made during type construction. */
5599 if (this_offset == 0)
5602 offset = size_binop (PLUS_EXPR, offset, this_offset);
5603 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5604 DECL_FIELD_BIT_OFFSET (field));
5606 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5609 else if (TREE_CODE (exp) == ARRAY_REF
5610 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5612 tree index = TREE_OPERAND (exp, 1);
5613 tree low_bound = array_ref_low_bound (exp);
5614 tree unit_size = array_ref_element_size (exp);
5616 /* We assume all arrays have sizes that are a multiple of a byte.
5617 First subtract the lower bound, if any, in the type of the
5618 index, then convert to sizetype and multiply by the size of the
5619 element. */
5620 if (! integer_zerop (low_bound))
5621 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5624 offset = size_binop (PLUS_EXPR, offset,
5625 size_binop (MULT_EXPR,
5626 convert (sizetype, index),
5630 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5631 conversions that don't change the mode, and all view conversions
5632 except those that need to "step up" the alignment. */
5633 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5634 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5635 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5636 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5638 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5639 < BIGGEST_ALIGNMENT)
5640 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5641 || TYPE_ALIGN_OK (TREE_TYPE
5642 (TREE_OPERAND (exp, 0))))))
5643 && ! ((TREE_CODE (exp) == NOP_EXPR
5644 || TREE_CODE (exp) == CONVERT_EXPR)
5645 && (TYPE_MODE (TREE_TYPE (exp))
5646 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5649 /* If any reference in the chain is volatile, the effect is volatile. */
5650 if (TREE_THIS_VOLATILE (exp))
5653 exp = TREE_OPERAND (exp, 0);
5656 /* If OFFSET is constant, see if we can return the whole thing as a
5657 constant bit position. Otherwise, split it up. */
5658 if (host_integerp (offset, 0)
5659 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5661 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5662 && host_integerp (tem, 0))
5663 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5665 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5671 /* Return a tree of sizetype representing the size, in bytes, of the element
5672 of EXP, an ARRAY_REF. */
5675 array_ref_element_size (tree exp)
5677 tree aligned_size = TREE_OPERAND (exp, 3);
5678 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5680 /* If a size was specified in the ARRAY_REF, it's the size measured
5681 in alignment units of the element type. So multiply by that value. */
5683 return size_binop (MULT_EXPR, aligned_size,
5684 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5686 /* Otherwise, take the size from that of the element type. Substitute
5687 any PLACEHOLDER_EXPR that we have. */
5689 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5692 /* Return a tree representing the lower bound of the array mentioned in
5693 EXP, an ARRAY_REF. */
5696 array_ref_low_bound (tree exp)
5698 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5700 /* If a lower bound is specified in EXP, use it. */
5701 if (TREE_OPERAND (exp, 2))
5702 return TREE_OPERAND (exp, 2);
5704 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5705 substituting for a PLACEHOLDER_EXPR as needed. */
5706 if (domain_type && TYPE_MIN_VALUE (domain_type))
5707 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5709 /* Otherwise, return a zero of the appropriate type. */
5710 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5713 /* Return a tree representing the offset, in bytes, of the field referenced
5714 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5717 component_ref_field_offset (tree exp)
5719 tree aligned_offset = TREE_OPERAND (exp, 2);
5720 tree field = TREE_OPERAND (exp, 1);
5722 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5723 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5724 value. */
5726 return size_binop (MULT_EXPR, aligned_offset,
5727 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5729 /* Otherwise, take the offset from that of the field. Substitute
5730 any PLACEHOLDER_EXPR that we have. */
5732 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5735 /* Return 1 if T is an expression that get_inner_reference handles. */
5738 handled_component_p (tree t)
5740 switch (TREE_CODE (t))
5745 case ARRAY_RANGE_REF:
5746 case NON_LVALUE_EXPR:
5747 case VIEW_CONVERT_EXPR:
5750 /* ??? Sure they are handled, but get_inner_reference may return
5751 a different PBITSIZE, depending upon whether the expression is
5752 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5755 return (TYPE_MODE (TREE_TYPE (t))
5756 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5763 /* Given an rtx VALUE that may contain additions and multiplications, return
5764 an equivalent value that just refers to a register, memory, or constant.
5765 This is done by generating instructions to perform the arithmetic and
5766 returning a pseudo-register containing the value.
5768 The returned value may be a REG, SUBREG, MEM or constant. */
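/* Editor's example (not from the source): given
   (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61)),
   force_operand emits a multiply and an add and returns the pseudo
   holding the sum, leaving the caller with a plain register. */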
5771 force_operand (rtx value, rtx target)
5774 /* Use subtarget as the target for operand 0 of a binary operation. */
5775 rtx subtarget = get_subtarget (target);
5776 enum rtx_code code = GET_CODE (value);
5778 /* Check for a subreg applied to an expression produced by the loop optimizer. */
5780 && !REG_P (SUBREG_REG (value))
5781 && GET_CODE (SUBREG_REG (value)) != MEM)
5783 value = simplify_gen_subreg (GET_MODE (value),
5784 force_reg (GET_MODE (SUBREG_REG (value)),
5785 force_operand (SUBREG_REG (value),
5787 GET_MODE (SUBREG_REG (value)),
5788 SUBREG_BYTE (value));
5789 code = GET_CODE (value);
5792 /* Check for a PIC address load. */
5793 if ((code == PLUS || code == MINUS)
5794 && XEXP (value, 0) == pic_offset_table_rtx
5795 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5796 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5797 || GET_CODE (XEXP (value, 1)) == CONST))
5800 subtarget = gen_reg_rtx (GET_MODE (value));
5801 emit_move_insn (subtarget, value);
5805 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5808 target = gen_reg_rtx (GET_MODE (value));
5809 convert_move (target, force_operand (XEXP (value, 0), NULL),
5810 code == ZERO_EXTEND);
5814 if (ARITHMETIC_P (value))
5816 op2 = XEXP (value, 1);
5817 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5819 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5822 op2 = negate_rtx (GET_MODE (value), op2);
5825 /* Check for an addition with OP2 a constant integer and our first
5826 operand a PLUS of a virtual register and something else. In that
5827 case, we want to emit the sum of the virtual register and the
5828 constant first and then add the other value. This allows virtual
5829 register instantiation to simply modify the constant rather than
5830 creating another one around this addition. */
5831 if (code == PLUS && GET_CODE (op2) == CONST_INT
5832 && GET_CODE (XEXP (value, 0)) == PLUS
5833 && REG_P (XEXP (XEXP (value, 0), 0))
5834 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5835 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5837 rtx temp = expand_simple_binop (GET_MODE (value), code,
5838 XEXP (XEXP (value, 0), 0), op2,
5839 subtarget, 0, OPTAB_LIB_WIDEN);
5840 return expand_simple_binop (GET_MODE (value), code, temp,
5841 force_operand (XEXP (XEXP (value,
5843 target, 0, OPTAB_LIB_WIDEN);
5846 op1 = force_operand (XEXP (value, 0), subtarget);
5847 op2 = force_operand (op2, NULL_RTX);
5851 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5853 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5854 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5855 target, 1, OPTAB_LIB_WIDEN);
5857 return expand_divmod (0,
5858 FLOAT_MODE_P (GET_MODE (value))
5859 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5860 GET_MODE (value), op1, op2, target, 0);
5863 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5867 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5871 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5875 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5876 target, 0, OPTAB_LIB_WIDEN);
5879 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5880 target, 1, OPTAB_LIB_WIDEN);
5883 if (UNARY_P (value))
5885 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5886 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5889 #ifdef INSN_SCHEDULING
5890 /* On machines that have insn scheduling, we want all memory references to be
5891 explicit, so we need to deal with such paradoxical SUBREGs. */
5892 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5893 && (GET_MODE_SIZE (GET_MODE (value))
5894 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5896 = simplify_gen_subreg (GET_MODE (value),
5897 force_reg (GET_MODE (SUBREG_REG (value)),
5898 force_operand (SUBREG_REG (value),
5900 GET_MODE (SUBREG_REG (value)),
5901 SUBREG_BYTE (value));
5907 /* Subroutine of expand_expr: return nonzero iff there is no way that
5908 EXP can reference X, which is being modified. TOP_P is nonzero if this
5909 call is going to be used to determine whether we need a temporary
5910 for EXP, as opposed to a recursive call to this function.
5912 It is always safe for this routine to return zero since it merely
5913 searches for optimization opportunities. */
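/* Editor's example: when expanding x = y + x, the right-hand side
   references the variable being modified, so safe_from_p (DECL_RTL of
   x, the PLUS_EXPR, 1) returns 0 and a temporary must be used; for
   x = y + z it returns 1 and X may serve as the target directly. */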
5916 safe_from_p (rtx x, tree exp, int top_p)
5920 static tree save_expr_list;
5923 /* If EXP has varying size, we MUST use a target since we currently
5924 have no way of allocating temporaries of variable size
5925 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5926 So we assume here that something at a higher level has prevented a
5927 clash. This is somewhat bogus, but the best we can do. Only
5928 do this when X is BLKmode and when we are at the top level. */
5929 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5930 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5931 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5932 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5933 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5935 && GET_MODE (x) == BLKmode)
5936 /* If X is in the outgoing argument area, it is always safe. */
5937 || (GET_CODE (x) == MEM
5938 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5939 || (GET_CODE (XEXP (x, 0)) == PLUS
5940 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5943 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5944 find the underlying pseudo. */
5945 if (GET_CODE (x) == SUBREG)
5948 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5952 /* A SAVE_EXPR might appear many times in the expression passed to the
5953 top-level safe_from_p call, and if it has a complex subexpression,
5954 examining it multiple times could result in a combinatorial explosion.
5955 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5956 with optimization took about 28 minutes to compile -- even though it was
5957 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5958 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5959 we have processed. Note that the only test of top_p was above. */
5968 rtn = safe_from_p (x, exp, 0);
5970 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5971 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5976 /* Now look at our tree code and possibly recurse. */
5977 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5980 exp_rtl = DECL_RTL_IF_SET (exp);
5987 if (TREE_CODE (exp) == TREE_LIST)
5991 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5993 exp = TREE_CHAIN (exp);
5996 if (TREE_CODE (exp) != TREE_LIST)
5997 return safe_from_p (x, exp, 0);
6000 else if (TREE_CODE (exp) == ERROR_MARK)
6001 return 1; /* An already-visited SAVE_EXPR? */
6007 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6012 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6016 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6017 the expression. If it is set, we conflict iff we are that rtx or
6018 both are in memory. Otherwise, we check all operands of the
6019 expression recursively. */
6021 switch (TREE_CODE (exp))
6024 /* If the operand is static or we are static, we can't conflict.
6025 Likewise if we don't conflict with the operand at all. */
6026 if (staticp (TREE_OPERAND (exp, 0))
6027 || TREE_STATIC (exp)
6028 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6031 /* Otherwise, the only way this can conflict is if we are taking
6032 the address of a DECL whose address is part of X, which is
6033 very rare. */
6034 exp = TREE_OPERAND (exp, 0);
6037 if (!DECL_RTL_SET_P (exp)
6038 || GET_CODE (DECL_RTL (exp)) != MEM)
6041 exp_rtl = XEXP (DECL_RTL (exp), 0);
6046 if (GET_CODE (x) == MEM
6047 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6048 get_alias_set (exp)))
6053 /* Assume that the call will clobber all hard registers and
6054 all of memory. */
6055 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6056 || GET_CODE (x) == MEM)
6061 /* If a sequence exists, we would have to scan every instruction
6062 in the sequence to see if it was safe. This is probably not
6063 worthwhile. */
6064 if (RTL_EXPR_SEQUENCE (exp))
6067 exp_rtl = RTL_EXPR_RTL (exp);
6070 case WITH_CLEANUP_EXPR:
6071 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6074 case CLEANUP_POINT_EXPR:
6075 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6078 exp_rtl = SAVE_EXPR_RTL (exp);
6082 /* If we've already scanned this, don't do it again. Otherwise,
6083 show we've scanned it and record for clearing the flag if we're
6085 if (TREE_PRIVATE (exp))
6088 TREE_PRIVATE (exp) = 1;
6089 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6091 TREE_PRIVATE (exp) = 0;
6095 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6099 /* The only operand we look at is operand 1. The rest aren't
6100 part of the expression. */
6101 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6107 /* If we have an rtx, we do not need to scan our operands. */
6111 nops = first_rtl_op (TREE_CODE (exp));
6112 for (i = 0; i < nops; i++)
6113 if (TREE_OPERAND (exp, i) != 0
6114 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6117 /* If this is a language-specific tree code, it may require
6118 special handling. */
6119 if ((unsigned int) TREE_CODE (exp)
6120 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6121 && !lang_hooks.safe_from_p (x, exp))
6125 /* If we have an rtl, find any enclosed object. Then see if we conflict
6126 with it. */
6129 if (GET_CODE (exp_rtl) == SUBREG)
6131 exp_rtl = SUBREG_REG (exp_rtl);
6133 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6137 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6138 are memory and they conflict. */
6139 return ! (rtx_equal_p (x, exp_rtl)
6140 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6141 && true_dependence (exp_rtl, VOIDmode, x,
6142 rtx_addr_varies_p)));
6145 /* If we reach here, it is safe. */
6149 /* Subroutine of expand_expr: return rtx if EXP is a
6150 variable or parameter; else return 0. */
6156 switch (TREE_CODE (exp))
6160 return DECL_RTL (exp);
6166 /* Return the highest power of two that EXP is known to be a multiple of.
6167 This is used in updating alignment of MEMs in array references. */
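/* Editor's worked example: for EXP == i * 12, the unknown operand i
   contributes a factor of 1 and the constant 12 contributes its lowest
   set bit, 4, so the result is 1 * 4 == 4: such an offset is known to
   be a multiple of 4 bytes. */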
6169 static unsigned HOST_WIDE_INT
6170 highest_pow2_factor (tree exp)
6172 unsigned HOST_WIDE_INT c0, c1;
6174 switch (TREE_CODE (exp))
6177 /* We can find the lowest bit that's a one. If the low
6178 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6179 We need to handle this case since we can find it in a COND_EXPR,
6180 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6181 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6182 later ICE.  */
6183 if (TREE_CONSTANT_OVERFLOW (exp))
6184 return BIGGEST_ALIGNMENT;
6187 /* Note: tree_low_cst is intentionally not used here,
6188 we don't care about the upper bits. */
6189 c0 = TREE_INT_CST_LOW (exp);
6190 c0 &= -c0;
6191 return c0 ? c0 : BIGGEST_ALIGNMENT;
6195 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6196 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6197 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6198 return MIN (c0, c1);
6200 case MULT_EXPR:
6201 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6202 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6203 return c0 * c1;
6205 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6207 if (integer_pow2p (TREE_OPERAND (exp, 1))
6208 && host_integerp (TREE_OPERAND (exp, 1), 1))
6210 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6211 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6212 return MAX (1, c0 / c1);
6216 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6218 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6220 case COMPOUND_EXPR:
6221 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6223 case COND_EXPR:
6224 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6225 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6226 return MIN (c0, c1);
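/* Editor's worked example (a gloss, assuming the usual default case
   that returns 1 for unanalyzable operands): for (i * 12) + 8, the
   MULT_EXPR case yields 1 * 4 = 4 for the product, and the PLUS_EXPR
   case returns MIN (4, 8) = 4, so a MEM addressed by this expression
   may be assumed 4-byte aligned.  */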
6235 /* Similar, except that the alignment requirements of TARGET are
6236 taken into account. Assume it is at least as aligned as its
6237 type, unless it is a COMPONENT_REF in which case the layout of
6238 the structure gives the alignment. */
6240 static unsigned HOST_WIDE_INT
6241 highest_pow2_factor_for_target (tree target, tree exp)
6243 unsigned HOST_WIDE_INT target_align, factor;
6245 factor = highest_pow2_factor (exp);
6246 if (TREE_CODE (target) == COMPONENT_REF)
6247 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6249 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6250 return MAX (factor, target_align);
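/* Editor's sketch, fenced off so it cannot affect compilation: the
   INTEGER_CST case above reduces to isolating the lowest set bit of
   the low word with "c & -c".  A hypothetical, self-contained
   illustration in plain C; it is not part of GCC.  */
#if 0
static unsigned long
lowest_pow2_factor_sketch (unsigned long c)
{
  /* In two's complement, -c flips every bit above the lowest set bit,
     so c & -c keeps exactly that bit: 24 -> 8, 7 -> 1, 0 -> 0.  */
  return c & -c;
}
#endif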
6253 /* Expands variable VAR. */
6256 expand_var (tree var)
6258 if (DECL_EXTERNAL (var))
6261 if (TREE_STATIC (var))
6262 /* If this is an inlined copy of a static local variable,
6263 look up the original decl. */
6264 var = DECL_ORIGIN (var);
6266 if (TREE_STATIC (var)
6267 ? !TREE_ASM_WRITTEN (var)
6268 : !DECL_RTL_SET_P (var))
6270 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6272 /* Prepare a mem & address for the decl. */
6275 if (TREE_STATIC (var))
6278 x = gen_rtx_MEM (DECL_MODE (var),
6279 gen_reg_rtx (Pmode));
6281 set_mem_attributes (x, var, 1);
6282 SET_DECL_RTL (var, x);
6284 else if (lang_hooks.expand_decl (var))
6285 /* OK.  */;
6286 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6287 expand_decl (var);
6288 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6289 rest_of_decl_compilation (var, NULL, 0, 0);
6290 else if (TREE_CODE (var) == TYPE_DECL
6291 || TREE_CODE (var) == CONST_DECL
6292 || TREE_CODE (var) == FUNCTION_DECL
6293 || TREE_CODE (var) == LABEL_DECL)
6294 /* No expansion needed.  */;
6295 else
6296 abort ();
6297 }
6300 /* Expands declarations of variables in list VARS. */
6303 expand_vars (tree vars)
6305 for (; vars; vars = TREE_CHAIN (vars))
6306 {
6307 tree var = vars;
6309 if (DECL_EXTERNAL (var))
6310 continue;
6312 expand_var (var);
6313 expand_decl_init (var);
6314 }
6317 /* Subroutine of expand_expr. Expand the two operands of a binary
6318 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6319 The value may be stored in TARGET if TARGET is nonzero. The
6320 MODIFIER argument is as documented by expand_expr. */
6323 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6324 enum expand_modifier modifier)
6326 if (! safe_from_p (target, exp1, 1))
6327 target = 0;
6328 if (operand_equal_p (exp0, exp1, 0))
6329 {
6330 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6331 *op1 = copy_rtx (*op0);
6332 }
6333 else
6334 {
6335 /* If we need to preserve evaluation order, copy exp0 into its own
6336 temporary variable so that it can't be clobbered by exp1. */
6337 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6338 exp0 = save_expr (exp0);
6339 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6340 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6341 }
6342 }
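/* Editor's illustration (a gloss, not original commentary): for a
   source expression like a[i] + (i = 2) under -fevaluation-order, the
   rtx produced for exp0 may be a bare MEM whose contents the side
   effects of exp1 would change before the binary operation reads it;
   save_expr forces exp0's value into its own temporary first.  */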
6345 /* expand_expr: generate code for computing expression EXP.
6346 An rtx for the computed value is returned. The value is never null.
6347 In the case of a void EXP, const0_rtx is returned.
6349 The value may be stored in TARGET if TARGET is nonzero.
6350 TARGET is just a suggestion; callers must assume that
6351 the rtx returned may not be the same as TARGET.
6353 If TARGET is CONST0_RTX, it means that the value will be ignored.
6355 If TMODE is not VOIDmode, it suggests generating the
6356 result in mode TMODE. But this is done only when convenient.
6357 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6358 TMODE is just a suggestion; callers must assume that
6359 the rtx returned may not have mode TMODE.
6361 Note that TARGET may have neither TMODE nor MODE. In that case, it
6362 probably will not be used.
6364 If MODIFIER is EXPAND_SUM then when EXP is an addition
6365 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6366 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6367 products as above, or REG or MEM, or constant.
6368 Ordinarily in such cases we would output mul or add instructions
6369 and then return a pseudo reg containing the sum.
6371 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6372 it also marks a label as absolutely required (it can't be dead).
6373 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6374 This is used for outputting expressions used in initializers.
6376 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6377 with a constant address even if that address is not normally legitimate.
6378 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6380 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6381 a call parameter. Such targets require special care as we haven't yet
6382 marked TARGET so that it's safe from being trashed by libcalls. We
6383 don't want to use TARGET for anything but the final result;
6384 intermediate values must go elsewhere.  Additionally, calls to
6385 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6387 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6388 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6389 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6390 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6391 recursively.  */
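/* Editor's illustration of the EXPAND_SUM contract described above (a
   sketch; the exact rtx is target-dependent): expanding &arr[i*4 + 2]
   with EXPAND_SUM may legitimately return

       (plus (mult (reg i) (const_int 4))
             (plus (symbol_ref arr) (const_int 2)))

   instead of emitting the arithmetic and returning a pseudo.  */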
6393 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6394 enum expand_modifier, rtx *);
6397 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6398 enum expand_modifier modifier, rtx *alt_rtl)
6399 {
6400 int rn = -1;
6401 rtx ret, last = NULL;
6403 /* Handle ERROR_MARK before anybody tries to access its type. */
6404 if (TREE_CODE (exp) == ERROR_MARK
6405 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6407 ret = CONST0_RTX (tmode);
6408 return ret ? ret : const0_rtx;
6411 if (flag_non_call_exceptions)
6413 rn = lookup_stmt_eh_region (exp);
6414 /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw. */
6415 if (rn >= 0)
6416 last = get_last_insn ();
6419 /* If this is an expression of some kind and it has an associated line
6420 number, then emit the line number before expanding the expression.
6422 We need to save and restore the file and line information so that
6423 errors discovered during expansion are emitted with the right
6424 information.  It would be better if the diagnostic routines
6425 used the file/line information embedded in the tree nodes rather
6426 than globals.  */
6427 if (cfun && EXPR_HAS_LOCATION (exp))
6429 location_t saved_location = input_location;
6430 input_location = EXPR_LOCATION (exp);
6431 emit_line_note (input_location);
6433 /* Record where the insns produced belong. */
6434 if (cfun->dont_emit_block_notes)
6435 record_block_change (TREE_BLOCK (exp));
6437 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6439 input_location = saved_location;
6440 }
6441 else
6443 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6446 /* If using non-call exceptions, mark all insns that may trap.
6447 expand_call() will mark CALL_INSNs before we get to this code,
6448 but it doesn't handle libcalls, and these may trap. */
6449 if (rn >= 0)
6450 {
6451 rtx insn;
6452 for (insn = next_real_insn (last); insn;
6453 insn = next_real_insn (insn))
6455 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6456 /* If we want exceptions for non-call insns, any
6457 may_trap_p instruction may throw. */
6458 && GET_CODE (PATTERN (insn)) != CLOBBER
6459 && GET_CODE (PATTERN (insn)) != USE
6460 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6462 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6463 REG_NOTES (insn));
6464 }
6465 }
6467 return ret;
6468 }
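/* Editor's note (a gloss, not original commentary): a REG_EH_REGION
   note attaches region number RN to an insn that may trap -- for
   example a libcall emitted for a division -- so that later passes
   can build the corresponding exception edges.  */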
6471 static rtx
6472 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6473 enum expand_modifier modifier, rtx *alt_rtl)
6475 rtx op0, op1, temp;
6476 tree type = TREE_TYPE (exp);
6477 int unsignedp;
6478 enum machine_mode mode;
6479 enum tree_code code = TREE_CODE (exp);
6480 optab this_optab;
6481 rtx subtarget, original_target;
6482 int ignore;
6483 tree context;
6485 mode = TYPE_MODE (type);
6486 unsignedp = TYPE_UNSIGNED (type);
6488 /* Use subtarget as the target for operand 0 of a binary operation. */
6489 subtarget = get_subtarget (target);
6490 original_target = target;
6491 ignore = (target == const0_rtx
6492 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6493 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6494 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6495 && TREE_CODE (type) == VOID_TYPE));
6497 /* If we are going to ignore this result, we need only do something
6498 if there is a side-effect somewhere in the expression. If there
6499 is, short-circuit the most common cases here. Note that we must
6500 not call expand_expr with anything but const0_rtx in case this
6501 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6503 if (ignore)
6504 {
6505 if (! TREE_SIDE_EFFECTS (exp))
6506 return const0_rtx;
6508 /* Ensure we reference a volatile object even if value is ignored, but
6509 don't do this if all we are doing is taking its address. */
6510 if (TREE_THIS_VOLATILE (exp)
6511 && TREE_CODE (exp) != FUNCTION_DECL
6512 && mode != VOIDmode && mode != BLKmode
6513 && modifier != EXPAND_CONST_ADDRESS)
6515 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6516 if (GET_CODE (temp) == MEM)
6517 temp = copy_to_reg (temp);
6521 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6522 || code == INDIRECT_REF || code == BUFFER_REF)
6523 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6524 modifier);
6526 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6527 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6529 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6530 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6533 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6534 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6535 /* If the second operand has no side effects, just evaluate
6536 the first.  */
6537 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6538 modifier);
6539 else if (code == BIT_FIELD_REF)
6541 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6542 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6543 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6544 return const0_rtx;
6545 }
6547 target = 0;
6548 }
6550 /* If we will do cse, generate all results into pseudo registers
6551 since 1) that allows cse to find more things
6552 and 2) otherwise cse could produce an insn the machine
6553 cannot support. An exception is a CONSTRUCTOR into a multi-word
6554 MEM: that's much more likely to be most efficient into the MEM.
6555 Another is a CALL_EXPR which must return in memory. */
6557 if (! cse_not_expected && mode != BLKmode && target
6558 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6559 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6560 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6567 tree function = decl_function_context (exp);
6569 temp = label_rtx (exp);
6570 temp = gen_rtx_LABEL_REF (Pmode, temp);
6572 if (function != current_function_decl
6574 LABEL_REF_NONLOCAL_P (temp) = 1;
6576 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6581 if (!DECL_RTL_SET_P (exp))
6583 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6584 return CONST0_RTX (mode);
6587 /* ... fall through ... */
6590 /* If a static var's type was incomplete when the decl was written,
6591 but the type is complete now, lay out the decl now. */
6592 if (DECL_SIZE (exp) == 0
6593 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6594 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6595 layout_decl (exp, 0);
6597 /* ... fall through ... */
6601 if (DECL_RTL (exp) == 0)
6604 /* Ensure the variable is marked as used even if it doesn't go through
6605 a parser.  If it hasn't been used yet, write out an external
6606 definition.  */
6607 if (! TREE_USED (exp))
6609 assemble_external (exp);
6610 TREE_USED (exp) = 1;
6613 /* Show we haven't gotten RTL for this yet.  */
6614 temp = 0;
6616 /* Handle variables inherited from containing functions. */
6617 context = decl_function_context (exp);
6619 if (context != 0 && context != current_function_decl
6620 /* If var is static, we don't need a static chain to access it. */
6621 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6622 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6626 /* Mark as non-local and addressable. */
6627 DECL_NONLOCAL (exp) = 1;
6628 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6630 lang_hooks.mark_addressable (exp);
6631 if (GET_CODE (DECL_RTL (exp)) != MEM)
6633 addr = XEXP (DECL_RTL (exp), 0);
6634 if (GET_CODE (addr) == MEM)
6636 = replace_equiv_address (addr,
6637 fix_lexical_addr (XEXP (addr, 0), exp));
6639 addr = fix_lexical_addr (addr, exp);
6641 temp = replace_equiv_address (DECL_RTL (exp), addr);
6644 /* This is the case of an array whose size is to be determined
6645 from its initializer, while the initializer is still being parsed.
6646 See expand_decl.  */
6648 else if (GET_CODE (DECL_RTL (exp)) == MEM
6649 && REG_P (XEXP (DECL_RTL (exp), 0)))
6650 temp = validize_mem (DECL_RTL (exp));
6652 /* If DECL_RTL is memory, we are in the normal case and either
6653 the address is not valid or it is not a register and -fforce-addr
6654 is specified, get the address into a register. */
6656 else if (GET_CODE (DECL_RTL (exp)) == MEM
6657 && modifier != EXPAND_CONST_ADDRESS
6658 && modifier != EXPAND_SUM
6659 && modifier != EXPAND_INITIALIZER
6660 && (! memory_address_p (DECL_MODE (exp),
6661 XEXP (DECL_RTL (exp), 0))
6663 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6666 *alt_rtl = DECL_RTL (exp);
6667 temp = replace_equiv_address (DECL_RTL (exp),
6668 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6671 /* If we got something, return it. But first, set the alignment
6672 if the address is a register. */
6675 if (GET_CODE (temp) == MEM && REG_P (XEXP (temp, 0)))
6676 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6681 /* If the mode of DECL_RTL does not match that of the decl, it
6682 must be a promoted value. We return a SUBREG of the wanted mode,
6683 but mark it so that we know that it was already extended. */
6685 if (REG_P (DECL_RTL (exp))
6686 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6688 /* Get the signedness used for this variable. Ensure we get the
6689 same mode we got when the variable was declared. */
6690 if (GET_MODE (DECL_RTL (exp))
6691 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6692 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6695 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6696 SUBREG_PROMOTED_VAR_P (temp) = 1;
6697 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6701 return DECL_RTL (exp);
6703 case INTEGER_CST:
6704 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6705 TREE_INT_CST_HIGH (exp), mode);
6707 /* ??? If overflow is set, fold will have done an incomplete job,
6708 which can result in (plus xx (const_int 0)), which can get
6709 simplified by validate_replace_rtx during virtual register
6710 instantiation, which can result in unrecognizable insns.
6711 Avoid this by forcing all overflows into registers. */
6712 if (TREE_CONSTANT_OVERFLOW (exp)
6713 && modifier != EXPAND_INITIALIZER)
6714 temp = force_reg (mode, temp);
6716 return temp;
6718 case VECTOR_CST:
6719 return const_vector_from_tree (exp);
6721 case CONST_DECL:
6722 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6724 case REAL_CST:
6725 /* If optimized, generate immediate CONST_DOUBLE
6726 which will be turned into memory by reload if necessary.
6728 We used to force a register so that loop.c could see it. But
6729 this does not allow gen_* patterns to perform optimizations with
6730 the constants. It also produces two insns in cases like "x = 1.0;".
6731 On most machines, floating-point constants are not permitted in
6732 many insns, so we'd end up copying it to a register in any case.
6734 Now, we do the copying in expand_binop, if appropriate. */
6735 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6736 TYPE_MODE (TREE_TYPE (exp)));
6738 case COMPLEX_CST:
6739 /* Handle evaluating a complex constant in a CONCAT target.  */
6740 if (original_target && GET_CODE (original_target) == CONCAT)
6741 {
6742 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6743 rtx rtarg, itarg;
6745 rtarg = XEXP (original_target, 0);
6746 itarg = XEXP (original_target, 1);
6748 /* Move the real and imaginary parts separately. */
6749 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6750 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6752 if (op0 != rtarg)
6753 emit_move_insn (rtarg, op0);
6754 if (op1 != itarg)
6755 emit_move_insn (itarg, op1);
6757 return original_target;
6758 }
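/* Editor's illustration (a gloss): a CONCAT target represents a
   complex value as (concat real imag), so expanding the constant
   __complex__ (1.0, 2.0) into it becomes two scalar moves, 1.0 into
   XEXP (target, 0) and 2.0 into XEXP (target, 1).  */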
6760 /* ... fall through ... */
6762 case STRING_CST:
6763 temp = output_constant_def (exp, 1);
6765 /* temp contains a constant address.
6766 On RISC machines where a constant address isn't valid,
6767 make some insns to get that address into a register. */
6768 if (modifier != EXPAND_CONST_ADDRESS
6769 && modifier != EXPAND_INITIALIZER
6770 && modifier != EXPAND_SUM
6771 && (! memory_address_p (mode, XEXP (temp, 0))
6772 || flag_force_addr))
6773 return replace_equiv_address (temp,
6774 copy_rtx (XEXP (temp, 0)));
6775 return temp;
6777 case SAVE_EXPR:
6778 context = decl_function_context (exp);
6780 /* If this SAVE_EXPR was at global context, assume we are an
6781 initialization function and move it into our context. */
6783 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6785 if (context == current_function_decl)
6788 /* If this is non-local, handle it. */
6791 /* The following call just exists to abort if the context is
6792 not of a containing function. */
6793 find_function_data (context);
6795 temp = SAVE_EXPR_RTL (exp);
6796 if (temp && REG_P (temp))
6798 put_var_into_stack (exp, /*rescan=*/true);
6799 temp = SAVE_EXPR_RTL (exp);
6801 if (temp == 0 || GET_CODE (temp) != MEM)
6804 replace_equiv_address (temp,
6805 fix_lexical_addr (XEXP (temp, 0), exp));
6807 if (SAVE_EXPR_RTL (exp) == 0)
6809 if (mode == VOIDmode)
6812 temp = assign_temp (build_qualified_type (type,
6814 | TYPE_QUAL_CONST)),
6817 SAVE_EXPR_RTL (exp) = temp;
6818 if (!optimize && REG_P (temp))
6819 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6820 save_expr_regs);
6822 /* If the mode of TEMP does not match that of the expression, it
6823 must be a promoted value. We pass store_expr a SUBREG of the
6824 wanted mode but mark it so that we know that it was already
6825 extended.  */
6827 if (REG_P (temp) && GET_MODE (temp) != mode)
6829 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6830 promote_mode (type, mode, &unsignedp, 0);
6831 SUBREG_PROMOTED_VAR_P (temp) = 1;
6832 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6835 if (temp == const0_rtx)
6836 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6837 else
6838 store_expr (TREE_OPERAND (exp, 0), temp,
6839 modifier == EXPAND_STACK_PARM ? 2 : 0);
6841 TREE_USED (exp) = 1;
6844 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6845 must be a promoted value. We return a SUBREG of the wanted mode,
6846 but mark it so that we know that it was already extended. */
6848 if (REG_P (SAVE_EXPR_RTL (exp))
6849 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6851 /* Compute the signedness and make the proper SUBREG. */
6852 promote_mode (type, mode, &unsignedp, 0);
6853 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6854 SUBREG_PROMOTED_VAR_P (temp) = 1;
6855 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6859 return SAVE_EXPR_RTL (exp);
6864 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6865 TREE_OPERAND (exp, 0)
6866 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6871 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6872 expand_goto (TREE_OPERAND (exp, 0));
6874 expand_computed_goto (TREE_OPERAND (exp, 0));
6877 /* These are lowered during gimplification, so we should never ever
6878 see them here.  */
6879 abort ();
6883 case LABELED_BLOCK_EXPR:
6884 if (LABELED_BLOCK_BODY (exp))
6885 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6886 /* Should perhaps use expand_label, but this is simpler and safer. */
6887 do_pending_stack_adjust ();
6888 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6891 case EXIT_BLOCK_EXPR:
6892 if (EXIT_BLOCK_RETURN (exp))
6893 sorry ("returned value in block_exit_expr");
6894 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6899 tree block = BIND_EXPR_BLOCK (exp);
6902 if (TREE_CODE (BIND_EXPR_BODY (exp)) != RTL_EXPR)
6904 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6905 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6906 mark_ends = (block != NULL_TREE);
6907 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6911 /* If we're not in functions-as-trees mode, we've already emitted
6912 those notes into our RTL_EXPR, so we just want to splice our BLOCK
6913 into the enclosing one. */
6916 /* Need to open a binding contour here because
6917 if there are any cleanups they must be contained here. */
6918 expand_start_bindings_and_block (2, NULL_TREE);
6920 /* Mark the corresponding BLOCK for output in its proper place. */
6923 if (TREE_USED (block))
6925 lang_hooks.decls.insert_block (block);
6929 /* If VARS have not yet been expanded, expand them now. */
6930 expand_vars (BIND_EXPR_VARS (exp));
6932 /* TARGET was clobbered early in this function.  The correct
6933 indicator of whether or not we need the value of this
6934 expression is the IGNORE variable.  */
6935 temp = expand_expr (BIND_EXPR_BODY (exp),
6936 ignore ? const0_rtx : target,
6937 VOIDmode, modifier);
6939 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6945 if (RTL_EXPR_SEQUENCE (exp))
6947 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6949 emit_insn (RTL_EXPR_SEQUENCE (exp));
6950 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6952 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6953 free_temps_for_rtl_expr (exp);
6954 if (alt_rtl)
6955 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6956 return RTL_EXPR_RTL (exp);
6959 /* If we don't need the result, just ensure we evaluate any
6965 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6966 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6971 /* All elts simple constants => refer to a constant in memory. But
6972 if this is a non-BLKmode mode, let it store a field at a time
6973 since that should make a CONST_INT or CONST_DOUBLE when we
6974 fold. Likewise, if we have a target we can use, it is best to
6975 store directly into the target unless the type is large enough
6976 that memcpy will be used. If we are making an initializer and
6977 all operands are constant, put it in memory as well.
6979 FIXME: Avoid trying to fill vector constructors piece-meal.
6980 Output them with output_constant_def below unless we're sure
6981 they're zeros. This should go away when vector initializers
6982 are treated like VECTOR_CST instead of arrays.  */
6984 else if ((TREE_STATIC (exp)
6985 && ((mode == BLKmode
6986 && ! (target != 0 && safe_from_p (target, exp, 1)))
6987 || TREE_ADDRESSABLE (exp)
6988 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6989 && (! MOVE_BY_PIECES_P
6990 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6992 && ! mostly_zeros_p (exp))))
6993 || ((modifier == EXPAND_INITIALIZER
6994 || modifier == EXPAND_CONST_ADDRESS)
6995 && TREE_CONSTANT (exp)))
6997 rtx constructor = output_constant_def (exp, 1);
6999 if (modifier != EXPAND_CONST_ADDRESS
7000 && modifier != EXPAND_INITIALIZER
7001 && modifier != EXPAND_SUM)
7002 constructor = validize_mem (constructor);
7008 /* Handle calls that pass values in multiple non-contiguous
7009 locations. The Irix 6 ABI has examples of this. */
7010 if (target == 0 || ! safe_from_p (target, exp, 1)
7011 || GET_CODE (target) == PARALLEL
7012 || modifier == EXPAND_STACK_PARM)
7014 = assign_temp (build_qualified_type (type,
7016 | (TREE_READONLY (exp)
7017 * TYPE_QUAL_CONST))),
7018 0, TREE_ADDRESSABLE (exp), 1);
7020 store_constructor (exp, target, 0, int_expr_size (exp));
7024 case INDIRECT_REF:
7025 {
7026 tree exp1 = TREE_OPERAND (exp, 0);
7028 if (modifier != EXPAND_WRITE)
7029 {
7030 tree t;
7032 t = fold_read_from_constant_string (exp);
7033 if (t)
7034 return expand_expr (t, target, tmode, modifier);
7035 }
7037 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7038 op0 = memory_address (mode, op0);
7039 temp = gen_rtx_MEM (mode, op0);
7040 set_mem_attributes (temp, exp, 0);
7042 /* If we are writing to this object and its type is a record with
7043 readonly fields, we must mark it as readonly so it will
7044 conflict with readonly references to those fields. */
7045 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7046 RTX_UNCHANGING_P (temp) = 1;
7053 #ifdef ENABLE_CHECKING
7054 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7059 tree array = TREE_OPERAND (exp, 0);
7060 tree low_bound = array_ref_low_bound (exp);
7061 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7064 /* Optimize the special-case of a zero lower bound.
7066 We convert the low_bound to sizetype to avoid some problems
7067 with constant folding. (E.g. suppose the lower bound is 1,
7068 and its mode is QI. Without the conversion, (ARRAY
7069 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7070 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7072 if (! integer_zerop (low_bound))
7073 index = size_diffop (index, convert (sizetype, low_bound));
7075 /* Fold an expression like: "foo"[2].
7076 This is not done in fold so it won't happen inside &.
7077 Don't fold if this is for wide characters since it's too
7078 difficult to do correctly and this is a very rare case. */
7080 if (modifier != EXPAND_CONST_ADDRESS
7081 && modifier != EXPAND_INITIALIZER
7082 && modifier != EXPAND_MEMORY)
7083 {
7084 tree t = fold_read_from_constant_string (exp);
7086 if (t)
7087 return expand_expr (t, target, tmode, modifier);
7088 }
7090 /* If this is a constant index into a constant array,
7091 just get the value from the array. Handle both the cases when
7092 we have an explicit constructor and when our operand is a variable
7093 that was declared const. */
7095 if (modifier != EXPAND_CONST_ADDRESS
7096 && modifier != EXPAND_INITIALIZER
7097 && modifier != EXPAND_MEMORY
7098 && TREE_CODE (array) == CONSTRUCTOR
7099 && ! TREE_SIDE_EFFECTS (array)
7100 && TREE_CODE (index) == INTEGER_CST
7101 && 0 > compare_tree_int (index,
7102 list_length (CONSTRUCTOR_ELTS
7103 (TREE_OPERAND (exp, 0)))))
7107 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7108 i = TREE_INT_CST_LOW (index);
7109 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7113 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7117 else if (optimize >= 1
7118 && modifier != EXPAND_CONST_ADDRESS
7119 && modifier != EXPAND_INITIALIZER
7120 && modifier != EXPAND_MEMORY
7121 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7122 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7123 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
7124 && targetm.binds_local_p (array))
7126 if (TREE_CODE (index) == INTEGER_CST)
7128 tree init = DECL_INITIAL (array);
7130 if (TREE_CODE (init) == CONSTRUCTOR)
7134 for (elem = CONSTRUCTOR_ELTS (init);
7136 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7137 elem = TREE_CHAIN (elem))
7140 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7141 return expand_expr (fold (TREE_VALUE (elem)), target,
7144 else if (TREE_CODE (init) == STRING_CST
7145 && 0 > compare_tree_int (index,
7146 TREE_STRING_LENGTH (init)))
7148 tree type = TREE_TYPE (TREE_TYPE (init));
7149 enum machine_mode mode = TYPE_MODE (type);
7151 if (GET_MODE_CLASS (mode) == MODE_INT
7152 && GET_MODE_SIZE (mode) == 1)
7153 return gen_int_mode (TREE_STRING_POINTER (init)
7154 [TREE_INT_CST_LOW (index)], mode);
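/* Editor's note (a gloss): this is the path that reduces the "foo"[2]
   example mentioned above to the immediate character 'o' when the
   array is a readonly VAR_DECL whose DECL_INITIAL is the STRING_CST.  */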
7159 goto normal_inner_ref;
7161 case COMPONENT_REF:
7162 /* If the operand is a CONSTRUCTOR, we can just extract the
7163 appropriate field if it is present. */
7164 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7168 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7169 elt = TREE_CHAIN (elt))
7170 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7171 /* We can normally use the value of the field in the
7172 CONSTRUCTOR. However, if this is a bitfield in
7173 an integral mode that we can fit in a HOST_WIDE_INT,
7174 we must mask only the number of bits in the bitfield,
7175 since this is done implicitly by the constructor. If
7176 the bitfield does not meet either of those conditions,
7177 we can't do this optimization. */
7178 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7179 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7180 == MODE_INT)
7181 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7182 <= HOST_BITS_PER_WIDE_INT))))
7184 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7185 && modifier == EXPAND_STACK_PARM)
7187 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7188 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7190 HOST_WIDE_INT bitsize
7191 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7192 enum machine_mode imode
7193 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7195 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7197 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7198 op0 = expand_and (imode, op0, op1, target);
7203 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7206 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7207 target, 0);
7208 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7209 target, 0);
7216 goto normal_inner_ref;
7219 case ARRAY_RANGE_REF:
7220 normal_inner_ref:
7221 {
7222 enum machine_mode mode1;
7223 HOST_WIDE_INT bitsize, bitpos;
7224 tree offset;
7225 int volatilep = 0;
7226 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7227 &mode1, &unsignedp, &volatilep);
7228 rtx orig_op0;
7230 /* If we got back the original object, something is wrong. Perhaps
7231 we are evaluating an expression too early. In any event, don't
7232 infinitely recurse.  */
7233 if (tem == exp)
7234 abort ();
7236 /* If TEM's type is a union of variable size, pass TARGET to the inner
7237 computation, since it will need a temporary and TARGET is known
7238 to be safe to reuse.  This occurs in unchecked conversion in Ada.  */
7240 orig_op0 = op0
7241 = expand_expr (tem,
7242 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7243 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7244 != INTEGER_CST)
7245 && modifier != EXPAND_STACK_PARM
7246 ? target : NULL_RTX),
7247 VOIDmode,
7248 (modifier == EXPAND_INITIALIZER
7249 || modifier == EXPAND_CONST_ADDRESS
7250 || modifier == EXPAND_STACK_PARM)
7251 ? modifier : EXPAND_NORMAL);
7253 /* If this is a constant, put it into a register if it is a
7254 legitimate constant and OFFSET is 0 and memory if it isn't. */
7255 if (CONSTANT_P (op0))
7257 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7258 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7260 op0 = force_reg (mode, op0);
7262 op0 = validize_mem (force_const_mem (mode, op0));
7265 /* Otherwise, if this object is not in memory and we either have an
7266 offset or a BLKmode result, put it there. This case can't occur in
7267 C, but can in Ada if we have unchecked conversion of an expression
7268 from a scalar type to an array or record type or for an
7269 ARRAY_RANGE_REF whose type is BLKmode. */
7270 else if (GET_CODE (op0) != MEM
7271 && (offset != 0
7272 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7274 /* If the operand is a SAVE_EXPR, we can deal with this by
7275 forcing the SAVE_EXPR into memory. */
7276 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7278 put_var_into_stack (TREE_OPERAND (exp, 0),
7279 /*rescan=*/true);
7280 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7285 = build_qualified_type (TREE_TYPE (tem),
7286 (TYPE_QUALS (TREE_TYPE (tem))
7287 | TYPE_QUAL_CONST));
7288 rtx memloc = assign_temp (nt, 1, 1, 1);
7290 emit_move_insn (memloc, op0);
7297 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7300 if (GET_CODE (op0) != MEM)
7303 #ifdef POINTERS_EXTEND_UNSIGNED
7304 if (GET_MODE (offset_rtx) != Pmode)
7305 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7307 if (GET_MODE (offset_rtx) != ptr_mode)
7308 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7311 if (GET_MODE (op0) == BLKmode
7312 /* A constant address in OP0 can have VOIDmode, we must
7313 not try to call force_reg in that case. */
7314 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7315 && bitsize != 0
7316 && (bitpos % bitsize) == 0
7317 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7318 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7320 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7324 op0 = offset_address (op0, offset_rtx,
7325 highest_pow2_factor (offset));
7328 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7329 record its alignment as BIGGEST_ALIGNMENT. */
7330 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7331 && is_aligning_offset (offset, tem))
7332 set_mem_align (op0, BIGGEST_ALIGNMENT);
7334 /* Don't forget about volatility even if this is a bitfield. */
7335 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7337 if (op0 == orig_op0)
7338 op0 = copy_rtx (op0);
7340 MEM_VOLATILE_P (op0) = 1;
7343 /* The following code doesn't handle CONCAT.
7344 Assume only bitpos == 0 can be used for CONCAT, due to
7345 one-element arrays having the same mode as their element.  */
7346 if (GET_CODE (op0) == CONCAT)
7348 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7353 /* In cases where an aligned union has an unaligned object
7354 as a field, we might be extracting a BLKmode value from
7355 an integer-mode (e.g., SImode) object. Handle this case
7356 by doing the extract into an object as wide as the field
7357 (which we know to be the width of a basic mode), then
7358 storing into memory, and changing the mode to BLKmode. */
7359 if (mode1 == VOIDmode
7360 || REG_P (op0) || GET_CODE (op0) == SUBREG
7361 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7362 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7363 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7364 && modifier != EXPAND_CONST_ADDRESS
7365 && modifier != EXPAND_INITIALIZER)
7366 /* If the field isn't aligned enough to fetch as a memref,
7367 fetch it as a bit field. */
7368 || (mode1 != BLKmode
7369 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7370 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7371 || (GET_CODE (op0) == MEM
7372 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7373 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7374 && ((modifier == EXPAND_CONST_ADDRESS
7375 || modifier == EXPAND_INITIALIZER)
7376 ? STRICT_ALIGNMENT
7377 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7378 || (bitpos % BITS_PER_UNIT != 0)))
7379 /* If the type and the field are a constant size and the
7380 size of the type isn't the same size as the bitfield,
7381 we must use bitfield operations. */
7382 || (bitsize >= 0
7383 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7384 == INTEGER_CST)
7385 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7386 bitsize)))
7388 enum machine_mode ext_mode = mode;
7390 if (ext_mode == BLKmode
7391 && ! (target != 0 && GET_CODE (op0) == MEM
7392 && GET_CODE (target) == MEM
7393 && bitpos % BITS_PER_UNIT == 0))
7394 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7396 if (ext_mode == BLKmode)
7399 target = assign_temp (type, 0, 1, 1);
7404 /* In this case, BITPOS must start at a byte boundary and
7405 TARGET, if specified, must be a MEM. */
7406 if (GET_CODE (op0) != MEM
7407 || (target != 0 && GET_CODE (target) != MEM)
7408 || bitpos % BITS_PER_UNIT != 0)
7409 abort ();
7411 emit_block_move (target,
7412 adjust_address (op0, VOIDmode,
7413 bitpos / BITS_PER_UNIT),
7414 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7415 / BITS_PER_UNIT),
7416 (modifier == EXPAND_STACK_PARM
7417 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7422 op0 = validize_mem (op0);
7424 if (GET_CODE (op0) == MEM && REG_P (XEXP (op0, 0)))
7425 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7427 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7428 (modifier == EXPAND_STACK_PARM
7429 ? NULL_RTX : target),
7430 ext_mode, ext_mode,
7431 int_size_in_bytes (TREE_TYPE (tem)));
7433 /* If the result is a record type and BITSIZE is narrower than
7434 the mode of OP0, an integral mode, and this is a big endian
7435 machine, we must put the field into the high-order bits. */
7436 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7437 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7438 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7439 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7440 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7441 - bitsize),
7442 op0, 1);
7444 /* If the result type is BLKmode, store the data into a temporary
7445 of the appropriate type, but with the mode corresponding to the
7446 mode for the data we have (op0's mode). It's tempting to make
7447 this a constant type, since we know it's only being stored once,
7448 but that can cause problems if we are taking the address of this
7449 COMPONENT_REF because the MEM of any reference via that address
7450 will have flags corresponding to the type, which will not
7451 necessarily be constant. */
7452 if (mode == BLKmode)
7455 = assign_stack_temp_for_type
7456 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7458 emit_move_insn (new, op0);
7459 op0 = copy_rtx (new);
7460 PUT_MODE (op0, BLKmode);
7461 set_mem_attributes (op0, exp, 1);
7467 /* If the result is BLKmode, use that to access the object
7468 now as well.  */
7469 if (mode == BLKmode)
7470 mode1 = BLKmode;
7472 /* Get a reference to just this component. */
7473 if (modifier == EXPAND_CONST_ADDRESS
7474 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7475 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7477 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7479 if (op0 == orig_op0)
7480 op0 = copy_rtx (op0);
7482 set_mem_attributes (op0, exp, 0);
7483 if (REG_P (XEXP (op0, 0)))
7484 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7486 MEM_VOLATILE_P (op0) |= volatilep;
7487 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7488 || modifier == EXPAND_CONST_ADDRESS
7489 || modifier == EXPAND_INITIALIZER)
7490 return op0;
7491 else if (target == 0)
7492 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7494 convert_move (target, op0, unsignedp);
7500 rtx insn, before = get_last_insn (), vtbl_ref;
7502 /* Evaluate the interior expression. */
7503 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7506 /* Get or create an instruction off which to hang a note. */
7507 if (REG_P (subtarget))
7510 insn = get_last_insn ();
7513 if (! INSN_P (insn))
7514 insn = prev_nonnote_insn (insn);
7518 target = gen_reg_rtx (GET_MODE (subtarget));
7519 insn = emit_move_insn (target, subtarget);
7522 /* Collect the data for the note. */
7523 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7524 vtbl_ref = plus_constant (vtbl_ref,
7525 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7526 /* Discard the initial CONST that was added. */
7527 vtbl_ref = XEXP (vtbl_ref, 0);
7529 REG_NOTES (insn)
7530 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7532 return target;
7535 /* Intended for a reference to a buffer of a file-object in Pascal.
7536 But it's not certain that a special tree code will really be
7537 necessary for these. INDIRECT_REF might work for them. */
7543 /* Pascal set IN expression.
7545 Algorithm:
7546 rlo = set_low - (set_low%bits_per_word);
7547 the_word = set [ (index - rlo)/bits_per_word ];
7548 bit_index = index % bits_per_word;
7549 bitmask = 1 << bit_index;
7550 return !!(the_word & bitmask); */
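#if 0
/* Editor's sketch of the algorithm above in plain C, fenced off so it
   cannot affect compilation.  It is hypothetical and assumes 8-bit
   storage units standing in for BITS_PER_UNIT.  */
static int
set_contains_sketch (const unsigned char *set, long set_low, long index)
{
  long rlo = set_low - (set_low % 8);              /* word-align the base */
  unsigned char the_word = set[(index - rlo) / 8]; /* fetch the unit */
  return (the_word >> (index % 8)) & 1;            /* test the bit */
}
#endif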
7552 tree set = TREE_OPERAND (exp, 0);
7553 tree index = TREE_OPERAND (exp, 1);
7554 int iunsignedp = TYPE_UNSIGNED (TREE_TYPE (index));
7555 tree set_type = TREE_TYPE (set);
7556 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7557 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7558 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7559 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7560 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7561 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7562 rtx setaddr = XEXP (setval, 0);
7563 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7565 rtx diff, quo, rem, addr, bit, result;
7567 /* If domain is empty, answer is no. Likewise if index is constant
7568 and out of bounds. */
7569 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7570 && TREE_CODE (set_low_bound) == INTEGER_CST
7571 && tree_int_cst_lt (set_high_bound, set_low_bound))
7572 || (TREE_CODE (index) == INTEGER_CST
7573 && TREE_CODE (set_low_bound) == INTEGER_CST
7574 && tree_int_cst_lt (index, set_low_bound))
7575 || (TREE_CODE (set_high_bound) == INTEGER_CST
7576 && TREE_CODE (index) == INTEGER_CST
7577 && tree_int_cst_lt (set_high_bound, index))))
7581 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7583 /* If we get here, we have to generate the code for both cases
7584 (in range and out of range). */
7586 op0 = gen_label_rtx ();
7587 op1 = gen_label_rtx ();
7589 if (! (GET_CODE (index_val) == CONST_INT
7590 && GET_CODE (lo_r) == CONST_INT))
7591 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7592 GET_MODE (index_val), iunsignedp, op1);
7594 if (! (GET_CODE (index_val) == CONST_INT
7595 && GET_CODE (hi_r) == CONST_INT))
7596 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7597 GET_MODE (index_val), iunsignedp, op1);
7599 /* Calculate the element number of bit zero in the first word
7600 of the set.  */
7601 if (GET_CODE (lo_r) == CONST_INT)
7602 rlow = GEN_INT (INTVAL (lo_r)
7603 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7604 else
7605 rlow = expand_binop (index_mode, and_optab, lo_r,
7606 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7607 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7609 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7610 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7612 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7613 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7614 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7615 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7617 addr = memory_address (byte_mode,
7618 expand_binop (index_mode, add_optab, diff,
7619 setaddr, NULL_RTX, iunsignedp,
7622 /* Extract the bit we want to examine. */
7623 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7624 gen_rtx_MEM (byte_mode, addr),
7625 make_tree (TREE_TYPE (index), rem),
7626 NULL_RTX, 1);
7627 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7628 GET_MODE (target) == byte_mode ? target : 0,
7629 1, OPTAB_LIB_WIDEN);
7631 if (result != target)
7632 convert_move (target, result, 1);
7634 /* Output the code to handle the out-of-range case. */
7635 emit_jump (op0);
7636 emit_label (op1);
7637 emit_move_insn (target, const0_rtx);
7638 emit_label (op0);
7640 return target;
7642 case WITH_CLEANUP_EXPR:
7643 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7645 WITH_CLEANUP_EXPR_RTL (exp)
7646 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7647 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7648 CLEANUP_EH_ONLY (exp));
7650 /* That's it for this cleanup. */
7651 TREE_OPERAND (exp, 1) = 0;
7653 return WITH_CLEANUP_EXPR_RTL (exp);
7655 case CLEANUP_POINT_EXPR:
7657 /* Start a new binding layer that will keep track of all cleanup
7658 actions to be performed. */
7659 expand_start_bindings (2);
7661 target_temp_slot_level = temp_slot_level;
7663 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7664 /* If we're going to use this value, load it up now. */
7665 if (! ignore)
7666 op0 = force_not_mem (op0);
7667 preserve_temp_slots (op0);
7668 expand_end_bindings (NULL_TREE, 0, 0);
7673 /* Check for a built-in function. */
7674 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7675 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7676 == FUNCTION_DECL)
7677 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7679 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7680 == BUILT_IN_FRONTEND)
7681 return lang_hooks.expand_expr (exp, original_target,
7682 tmode, modifier,
7683 alt_rtl);
7684 else
7685 return expand_builtin (exp, target, subtarget, tmode, ignore);
7688 return expand_call (exp, target, ignore);
7690 case NON_LVALUE_EXPR:
7691 case NOP_EXPR:
7692 case CONVERT_EXPR:
7693 case REFERENCE_EXPR:
7694 if (TREE_OPERAND (exp, 0) == error_mark_node)
7695 return const0_rtx;
7697 if (TREE_CODE (type) == UNION_TYPE)
7699 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7701 /* If both input and output are BLKmode, this conversion isn't doing
7702 anything except possibly changing memory attributes.  */
7703 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7705 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7706 modifier);
7708 result = copy_rtx (result);
7709 set_mem_attributes (result, exp, 0);
7715 if (TYPE_MODE (type) != BLKmode)
7716 target = gen_reg_rtx (TYPE_MODE (type));
7718 target = assign_temp (type, 0, 1, 1);
7721 if (GET_CODE (target) == MEM)
7722 /* Store data into beginning of memory target. */
7723 store_expr (TREE_OPERAND (exp, 0),
7724 adjust_address (target, TYPE_MODE (valtype), 0),
7725 modifier == EXPAND_STACK_PARM ? 2 : 0);
7727 else if (REG_P (target))
7728 /* Store this field into a union of the proper type. */
7729 store_field (target,
7730 MIN ((int_size_in_bytes (TREE_TYPE
7731 (TREE_OPERAND (exp, 0)))
7733 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7734 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7735 VOIDmode, 0, type, 0);
7739 /* Return the entire union. */
7743 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7745 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7748 /* If the signedness of the conversion differs and OP0 is
7749 a promoted SUBREG, clear that indication since we now
7750 have to do the proper extension. */
7751 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7752 && GET_CODE (op0) == SUBREG)
7753 SUBREG_PROMOTED_VAR_P (op0) = 0;
7758 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7759 if (GET_MODE (op0) == mode)
7762 /* If OP0 is a constant, just convert it into the proper mode. */
7763 if (CONSTANT_P (op0))
7765 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7766 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7768 if (modifier == EXPAND_INITIALIZER)
7769 return simplify_gen_subreg (mode, op0, inner_mode,
7770 subreg_lowpart_offset (mode,
7773 return convert_modes (mode, inner_mode, op0,
7774 TYPE_UNSIGNED (inner_type));
7777 if (modifier == EXPAND_INITIALIZER)
7778 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7782 convert_to_mode (mode, op0,
7783 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7785 convert_move (target, op0,
7786 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7789 case VIEW_CONVERT_EXPR:
7790 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7792 /* If the input and output modes are both the same, we are done.
7793 Otherwise, if neither mode is BLKmode and both are integral and within
7794 a word, we can use gen_lowpart. If neither is true, make sure the
7795 operand is in memory and convert the MEM to the new mode. */
7796 if (TYPE_MODE (type) == GET_MODE (op0))
7798 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7799 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7800 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7801 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7802 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7803 op0 = gen_lowpart (TYPE_MODE (type), op0);
7804 else if (GET_CODE (op0) != MEM)
7806 /* If the operand is not a MEM, force it into memory. Since we
7807 are going to be changing the mode of the MEM, don't call
7808 force_const_mem for constants because we don't allow pool
7809 constants to change mode. */
7810 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7812 if (TREE_ADDRESSABLE (exp))
7813 abort ();
7815 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7817 = assign_stack_temp_for_type
7818 (TYPE_MODE (inner_type),
7819 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7821 emit_move_insn (target, op0);
7825 /* At this point, OP0 is in the correct mode. If the output type is such
7826 that the operand is known to be aligned, indicate that it is.
7827 Otherwise, we need only be concerned about alignment for non-BLKmode
7828 results.  */
7829 if (GET_CODE (op0) == MEM)
7830 {
7831 op0 = copy_rtx (op0);
7833 if (TYPE_ALIGN_OK (type))
7834 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7835 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7836 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7838 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7839 HOST_WIDE_INT temp_size
7840 = MAX (int_size_in_bytes (inner_type),
7841 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7842 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7843 temp_size, 0, type);
7844 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7846 if (TREE_ADDRESSABLE (exp))
7847 abort ();
7849 if (GET_MODE (op0) == BLKmode)
7850 emit_block_move (new_with_op0_mode, op0,
7851 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7852 (modifier == EXPAND_STACK_PARM
7853 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7855 emit_move_insn (new_with_op0_mode, op0);
7860 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7861 }
7863 return op0;
7865 case PLUS_EXPR:
7866 this_optab = ! unsignedp && flag_trapv
7867 && (GET_MODE_CLASS (mode) == MODE_INT)
7868 ? addv_optab : add_optab;
7870 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7871 something else, make sure we add the register to the constant and
7872 then to the other thing. This case can occur during strength
7873 reduction and doing it this way will produce better code if the
7874 frame pointer or argument pointer is eliminated.
7876 fold-const.c will ensure that the constant is always in the inner
7877 PLUS_EXPR, so the only case we need to do anything about is if
7878 sp, ap, or fp is our second argument, in which case we must swap
7879 the innermost first argument and our second argument. */
7881 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7882 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7883 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7884 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7885 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7886 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7888 tree t = TREE_OPERAND (exp, 1);
7890 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7891 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
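/* Editor's illustration (a gloss): after strength reduction an address
   can look like (x + 4) + fp; the swap above rewrites it as
   (fp + 4) + x, so that plus_constant can fold fp + 4 into a single
   operand once the frame pointer is eliminated.  */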
7894 /* If the result is to be ptr_mode and we are adding an integer to
7895 something, we might be forming a constant. So try to use
7896 plus_constant. If it produces a sum and we can't accept it,
7897 use force_operand. This allows P = &ARR[const] to generate
7898 efficient code on machines where a SYMBOL_REF is not a valid
7899 address.
7901 If this is an EXPAND_SUM call, always return the sum. */
7902 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7903 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7905 if (modifier == EXPAND_STACK_PARM)
7906 target = 0;
7907 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7908 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7909 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7913 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7915 /* Use immed_double_const to ensure that the constant is
7916 truncated according to the mode of OP1, then sign extended
7917 to a HOST_WIDE_INT. Using the constant directly can result
7918 in non-canonical RTL in a 64x32 cross compile. */
7919 constant_part
7920 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7921 (HOST_WIDE_INT) 0,
7922 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7923 op1 = plus_constant (op1, INTVAL (constant_part));
7924 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7925 op1 = force_operand (op1, target);
7929 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7930 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7931 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7935 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7936 (modifier == EXPAND_INITIALIZER
7937 ? EXPAND_INITIALIZER : EXPAND_SUM));
7938 if (! CONSTANT_P (op0))
7939 {
7940 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7941 VOIDmode, modifier);
7942 /* Return a PLUS if modifier says it's OK. */
7943 if (modifier == EXPAND_SUM
7944 || modifier == EXPAND_INITIALIZER)
7945 return simplify_gen_binary (PLUS, mode, op0, op1);
7946 goto binop2;
7947 }
7948 /* Use immed_double_const to ensure that the constant is
7949 truncated according to the mode of OP1, then sign extended
7950 to a HOST_WIDE_INT. Using the constant directly can result
7951 in non-canonical RTL in a 64x32 cross compile. */
7952 constant_part
7953 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7954 (HOST_WIDE_INT) 0,
7955 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7956 op0 = plus_constant (op0, INTVAL (constant_part));
7957 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7958 op0 = force_operand (op0, target);
7963 /* No sense saving up arithmetic to be done
7964 if it's all in the wrong mode to form part of an address.
7965 And force_operand won't know whether to sign-extend or
7966 zero-extend.  */
7967 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7968 || mode != ptr_mode)
7970 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7971 subtarget, &op0, &op1, 0);
7972 if (op0 == const0_rtx)
7974 if (op1 == const0_rtx)
7979 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7980 subtarget, &op0, &op1, modifier);
7981 return simplify_gen_binary (PLUS, mode, op0, op1);
7983 case MINUS_EXPR:
7984 /* For initializers, we are allowed to return a MINUS of two
7985 symbolic constants.  Here we handle all cases when both operands
7986 are constant.  */
7987 /* Handle difference of two symbolic constants,
7988 for the sake of an initializer. */
7989 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7990 && really_constant_p (TREE_OPERAND (exp, 0))
7991 && really_constant_p (TREE_OPERAND (exp, 1)))
7993 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7994 NULL_RTX, &op0, &op1, modifier);
7996 /* If the last operand is a CONST_INT, use plus_constant of
7997 the negated constant. Else make the MINUS. */
7998 if (GET_CODE (op1) == CONST_INT)
7999 return plus_constant (op0, - INTVAL (op1));
8000 else
8001 return gen_rtx_MINUS (mode, op0, op1);
8004 this_optab = ! unsignedp && flag_trapv
8005 && (GET_MODE_CLASS(mode) == MODE_INT)
8006 ? subv_optab : sub_optab;
8008 /* No sense saving up arithmetic to be done
8009 if it's all in the wrong mode to form part of an address.
8010 And force_operand won't know whether to sign-extend or
8011 zero-extend.  */
8012 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8013 || mode != ptr_mode)
8014 goto binop;
8016 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8017 subtarget, &op0, &op1, modifier);
8019 /* Convert A - const to A + (-const). */
8020 if (GET_CODE (op1) == CONST_INT)
8021 {
8022 op1 = negate_rtx (mode, op1);
8023 return simplify_gen_binary (PLUS, mode, op0, op1);
8024 }
8026 goto binop2;
8028 case MULT_EXPR:
8029 /* If first operand is constant, swap them.
8030 Thus the following special case checks need only
8031 check the second operand. */
8032 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8034 tree t1 = TREE_OPERAND (exp, 0);
8035 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8036 TREE_OPERAND (exp, 1) = t1;
8039 /* Attempt to return something suitable for generating an
8040 indexed address, for machines that support that. */
8042 if (modifier == EXPAND_SUM && mode == ptr_mode
8043 && host_integerp (TREE_OPERAND (exp, 1), 0))
8045 tree exp1 = TREE_OPERAND (exp, 1);
8047 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8048 EXPAND_SUM);
8050 if (!REG_P (op0))
8051 op0 = force_operand (op0, NULL_RTX);
8052 if (!REG_P (op0))
8053 op0 = copy_to_mode_reg (mode, op0);
8055 return gen_rtx_MULT (mode, op0,
8056 gen_int_mode (tree_low_cst (exp1, 0),
8057 TYPE_MODE (TREE_TYPE (exp1))));
8060 if (modifier == EXPAND_STACK_PARM)
8061 target = 0;
8063 /* Check for multiplying things that have been extended
8064 from a narrower type. If this machine supports multiplying
8065 in that narrower type with a result in the desired type,
8066 do it that way, and avoid the explicit type-conversion. */
8067 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8068 && TREE_CODE (type) == INTEGER_TYPE
8069 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8070 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8071 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8072 && int_fits_type_p (TREE_OPERAND (exp, 1),
8073 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8074 /* Don't use a widening multiply if a shift will do. */
8075 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8076 > HOST_BITS_PER_WIDE_INT)
8077 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8079 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8080 && (TYPE_PRECISION (TREE_TYPE
8081 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8082 == TYPE_PRECISION (TREE_TYPE
8083 (TREE_OPERAND
8084 (TREE_OPERAND (exp, 0), 0))))
8085 /* If both operands are extended, they must either both
8086 be zero-extended or both be sign-extended. */
8087 && (TYPE_UNSIGNED (TREE_TYPE
8088 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8089 == TYPE_UNSIGNED (TREE_TYPE
8091 (TREE_OPERAND (exp, 0), 0)))))))
8093 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
8094 enum machine_mode innermode = TYPE_MODE (op0type);
8095 bool zextend_p = TYPE_UNSIGNED (op0type);
8096 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
8097 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
8099 if (mode == GET_MODE_WIDER_MODE (innermode))
8101 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8103 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8104 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8105 TREE_OPERAND (exp, 1),
8106 NULL_RTX, &op0, &op1, 0);
8108 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8109 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8110 NULL_RTX, &op0, &op1, 0);
8113 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8114 && innermode == word_mode)
8117 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8118 NULL_RTX, VOIDmode, 0);
8119 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8120 op1 = convert_modes (innermode, mode,
8121 expand_expr (TREE_OPERAND (exp, 1),
8122 NULL_RTX, VOIDmode, 0),
8125 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8126 NULL_RTX, VOIDmode, 0);
8127 temp = expand_binop (mode, other_optab, op0, op1, target,
8128 unsignedp, OPTAB_LIB_WIDEN);
8129 hipart = gen_highpart (innermode, temp);
8130 htem = expand_mult_highpart_adjust (innermode, hipart,
8134 emit_move_insn (hipart, htem);
8139 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8140 subtarget, &op0, &op1, 0);
8141 return expand_mult (mode, op0, op1, target, unsignedp);
8143 case TRUNC_DIV_EXPR:
8144 case FLOOR_DIV_EXPR:
8146 case ROUND_DIV_EXPR:
8147 case EXACT_DIV_EXPR:
8148 if (modifier == EXPAND_STACK_PARM)
8150 /* Possible optimization: compute the dividend with EXPAND_SUM;
8151 then, if the divisor is constant, we can optimize the case
8152 where some terms of the dividend have coefficients divisible by it. */
8153 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8154 subtarget, &op0, &op1, 0);
8155 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8158 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal, saving
8159 an expensive divide. If not, combine will rebuild the original division. */
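/* For example, under -funsafe-math-optimizations,

     x / y + z / y    becomes    x * (1/y) + z * (1/y)

   and CSE can then share the single reciprocal, trading two divides
   for one divide and two multiplies. */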
8161 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8162 && TREE_CODE (type) == REAL_TYPE
8163 && !real_onep (TREE_OPERAND (exp, 0)))
8164 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8165 build (RDIV_EXPR, type,
8166 build_real (type, dconst1),
8167 TREE_OPERAND (exp, 1))),
8168 target, tmode, modifier);
8169 this_optab = sdiv_optab;
8172 case TRUNC_MOD_EXPR:
8173 case FLOOR_MOD_EXPR:
8175 case ROUND_MOD_EXPR:
8176 if (modifier == EXPAND_STACK_PARM)
8178 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8179 subtarget, &op0, &op1, 0);
8180 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8182 case FIX_ROUND_EXPR:
8183 case FIX_FLOOR_EXPR:
8185 abort (); /* Not used for C. */
8187 case FIX_TRUNC_EXPR:
8188 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8189 if (target == 0 || modifier == EXPAND_STACK_PARM)
8190 target = gen_reg_rtx (mode);
8191 expand_fix (target, op0, unsignedp);
8195 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8196 if (target == 0 || modifier == EXPAND_STACK_PARM)
8197 target = gen_reg_rtx (mode);
8198 /* expand_float can't figure out what to do if FROM has VOIDmode.
8199 So give it the correct mode. With -O, cse will optimize this. */
8200 if (GET_MODE (op0) == VOIDmode)
8201 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8203 expand_float (target, op0,
8204 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8208 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8209 if (modifier == EXPAND_STACK_PARM)
8211 temp = expand_unop (mode,
8212 ! unsignedp && flag_trapv
8213 && (GET_MODE_CLASS(mode) == MODE_INT)
8214 ? negv_optab : neg_optab, op0, target, 0);
8220 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8221 if (modifier == EXPAND_STACK_PARM)
8224 /* ABS_EXPR is not valid for complex arguments. */
8225 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8226 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8229 /* Unsigned abs is simply the operand. Testing here means we don't
8230 risk generating incorrect code below. */
8231 if (TYPE_UNSIGNED (type))
8234 return expand_abs (mode, op0, target, unsignedp,
8235 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8239 target = original_target;
8241 || modifier == EXPAND_STACK_PARM
8242 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8243 || GET_MODE (target) != mode
8245 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8246 target = gen_reg_rtx (mode);
8247 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8248 target, &op0, &op1, 0);
8250 /* First try to do it with a special MIN or MAX instruction.
8251 If that does not win, use a conditional jump to select the proper value. */
8253 this_optab = (unsignedp
8254 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8255 : (code == MIN_EXPR ? smin_optab : smax_optab));
8257 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8262 /* At this point, a MEM target is no longer useful; we will get better code without it. */
8265 if (GET_CODE (target) == MEM)
8266 target = gen_reg_rtx (mode);
8268 /* If op1 was placed in target, swap op0 and op1. */
8269 if (target != op0 && target == op1)
8277 emit_move_insn (target, op0);
8279 op0 = gen_label_rtx ();
8281 /* If this mode is an integer too wide to compare properly,
8282 compare word by word. Rely on cse to optimize constant cases. */
8283 if (GET_MODE_CLASS (mode) == MODE_INT
8284 && ! can_compare_p (GE, mode, ccp_jump))
8286 if (code == MAX_EXPR)
8287 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
8290 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
8295 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8296 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
8298 emit_move_insn (target, op1);
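/* Schematically, the fallback emitted above for MAX_EXPR is

     target = op0;
     if (target >= op1) goto done;
     target = op1;
   done:

   with the MIN_EXPR case using <= instead. */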
8303 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8304 if (modifier == EXPAND_STACK_PARM)
8306 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8311 /* ??? Can optimize bitwise operations with one arg constant.
8312 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8313 and (a bitwise1 b) bitwise2 b (etc)
8314 but that is probably not worthwhile. */
8316 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8317 boolean values when we want in all cases to compute both of them. In
8318 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8319 as actual zero-or-1 values and then bitwise anding. In cases where
8320 there cannot be any side effects, better code would be made by
8321 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8322 how to recognize those cases. */
8324 case TRUTH_AND_EXPR:
8326 this_optab = and_optab;
8331 this_optab = ior_optab;
8334 case TRUTH_XOR_EXPR:
8336 this_optab = xor_optab;
8343 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8345 if (modifier == EXPAND_STACK_PARM)
8347 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8348 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8351 /* Could determine the answer when only additive constants differ. Also,
8352 the addition of one can be handled by changing the condition. */
8359 case UNORDERED_EXPR:
8367 temp = do_store_flag (exp,
8368 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8369 tmode != VOIDmode ? tmode : mode, 0);
8373 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8374 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8376 && REG_P (original_target)
8377 && (GET_MODE (original_target)
8378 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8380 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8383 /* If temp is constant, we can just compute the result. */
8384 if (GET_CODE (temp) == CONST_INT)
8386 if (INTVAL (temp) != 0)
8387 emit_move_insn (target, const1_rtx);
8389 emit_move_insn (target, const0_rtx);
8394 if (temp != original_target)
8396 enum machine_mode mode1 = GET_MODE (temp);
8397 if (mode1 == VOIDmode)
8398 mode1 = tmode != VOIDmode ? tmode : mode;
8400 temp = copy_to_mode_reg (mode1, temp);
8403 op1 = gen_label_rtx ();
8404 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8405 GET_MODE (temp), unsignedp, op1);
8406 emit_move_insn (temp, const1_rtx);
8411 /* If there is no set-flag instruction, we must generate a conditional
8412 store into a temporary variable. Drop through
8413 and handle this like && and ||. */
8415 case TRUTH_ANDIF_EXPR:
8416 case TRUTH_ORIF_EXPR:
8419 || modifier == EXPAND_STACK_PARM
8420 || ! safe_from_p (target, exp, 1)
8421 /* Make sure we don't have a hard reg (such as function's return
8422 value) live across basic blocks, if not optimizing. */
8423 || (!optimize && REG_P (target)
8424 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8425 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8428 emit_clr_insn (target);
8430 op1 = gen_label_rtx ();
8431 jumpifnot (exp, op1);
8434 emit_0_to_1_insn (target);
8437 return ignore ? const0_rtx : target;
8439 case TRUTH_NOT_EXPR:
8440 if (modifier == EXPAND_STACK_PARM)
8442 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8443 /* The parser is careful to generate TRUTH_NOT_EXPR
8444 only with operands that are always zero or one. */
8445 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8446 target, 1, OPTAB_LIB_WIDEN);
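/* That is, since the operand is known to be 0 or 1, !x is computed
   branch-free as

     x ^ 1

   which maps 0 to 1 and 1 to 0. */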
8452 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8454 return expand_expr_real (TREE_OPERAND (exp, 1),
8455 (ignore ? const0_rtx : target),
8456 VOIDmode, modifier, alt_rtl);
8458 case STATEMENT_LIST:
8460 tree_stmt_iterator iter;
8465 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8466 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8471 /* If it's void, we don't need to worry about computing a value. */
8472 if (VOID_TYPE_P (TREE_TYPE (exp)))
8474 tree pred = TREE_OPERAND (exp, 0);
8475 tree then_ = TREE_OPERAND (exp, 1);
8476 tree else_ = TREE_OPERAND (exp, 2);
8478 /* If we do not have any pending cleanups or stack_levels
8479 to restore, and at least one arm of the COND_EXPR is a
8480 GOTO_EXPR to a local label, then we can emit more efficient
8481 code by using jumpif/jumpifnot instead of the 'if' machinery. */
8483 || containing_blocks_have_cleanups_or_stack_level ())
8485 else if (TREE_CODE (then_) == GOTO_EXPR
8486 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8488 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8489 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8491 else if (TREE_CODE (else_) == GOTO_EXPR
8492 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8494 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8495 return expand_expr (then_, const0_rtx, VOIDmode, 0);
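/* For example,

     if (p) goto L; else E;

   expands to a single conditional jump to L followed by the code for
   E, avoiding the extra join label of the generic 'if' machinery. */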
8498 /* Just use the 'if' machinery. */
8499 expand_start_cond (pred, 0);
8500 start_cleanup_deferral ();
8501 expand_expr (then_, const0_rtx, VOIDmode, 0);
8505 /* Iterate over 'else if's instead of recursing. */
8506 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8508 expand_start_else ();
8509 if (EXPR_HAS_LOCATION (exp))
8511 emit_line_note (EXPR_LOCATION (exp));
8512 if (cfun->dont_emit_block_notes)
8513 record_block_change (TREE_BLOCK (exp));
8515 expand_elseif (TREE_OPERAND (exp, 0));
8516 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8518 /* Don't emit the jump and label if there's no 'else' clause. */
8519 if (TREE_SIDE_EFFECTS (exp))
8521 expand_start_else ();
8522 expand_expr (exp, const0_rtx, VOIDmode, 0);
8524 end_cleanup_deferral ();
8529 /* If we would have a "singleton" (see below) were it not for a
8530 conversion in each arm, bring that conversion back out. */
8531 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8532 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8533 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8534 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8536 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8537 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8539 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8540 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8541 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8542 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8543 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8544 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8545 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8546 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8547 return expand_expr (build1 (NOP_EXPR, type,
8548 build (COND_EXPR, TREE_TYPE (iftrue),
8549 TREE_OPERAND (exp, 0),
8551 target, tmode, modifier);
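/* For example,

     x ? (int) (a + b) : (int) a

   is rewritten here as

     (int) (x ? a + b : a)

   so that the arms again have the A op B : A "singleton" shape
   recognized below. */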
8555 /* Note that COND_EXPRs whose type is a structure or union
8556 are required to be constructed to contain assignments of
8557 a temporary variable, so that we can evaluate them here
8558 for side effect only. If type is void, we must do likewise. */
8560 /* If an arm of the branch requires a cleanup,
8561 only that cleanup is performed. */
8564 tree binary_op = 0, unary_op = 0;
8566 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8567 convert it to our mode, if necessary. */
8568 if (integer_onep (TREE_OPERAND (exp, 1))
8569 && integer_zerop (TREE_OPERAND (exp, 2))
8570 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8574 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8579 if (modifier == EXPAND_STACK_PARM)
8581 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8582 if (GET_MODE (op0) == mode)
8586 target = gen_reg_rtx (mode);
8587 convert_move (target, op0, unsignedp);
8591 /* Check for X ? A + B : A. If we have this, we can copy A to the
8592 output and conditionally add B. Similarly for unary operations.
8593 Don't do this if X has side-effects because those side effects
8594 might affect A or B and the "?" operation is a sequence point in
8595 ANSI. (operand_equal_p tests for side effects.) */
8597 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8598 && operand_equal_p (TREE_OPERAND (exp, 2),
8599 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8600 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8601 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8602 && operand_equal_p (TREE_OPERAND (exp, 1),
8603 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8604 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8605 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8606 && operand_equal_p (TREE_OPERAND (exp, 2),
8607 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8608 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8609 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8610 && operand_equal_p (TREE_OPERAND (exp, 1),
8611 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8612 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8614 /* If we are not to produce a result, we have no target. Otherwise,
8615 if a target was specified use it; it will not be used as an
8616 intermediate target unless it is safe. If no target, use a temporary. */
8621 else if (modifier == EXPAND_STACK_PARM)
8622 temp = assign_temp (type, 0, 0, 1);
8623 else if (original_target
8624 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8625 || (singleton && REG_P (original_target)
8626 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8627 && original_target == var_rtx (singleton)))
8628 && GET_MODE (original_target) == mode
8629 #ifdef HAVE_conditional_move
8630 && (! can_conditionally_move_p (mode)
8631 || REG_P (original_target)
8632 || TREE_ADDRESSABLE (type))
8634 && (GET_CODE (original_target) != MEM
8635 || TREE_ADDRESSABLE (type)))
8636 temp = original_target;
8637 else if (TREE_ADDRESSABLE (type))
8640 temp = assign_temp (type, 0, 0, 1);
8642 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8643 do the test of X as a store-flag operation, do this as
8644 A + ((X != 0) << log C). Similarly for other simple binary
8645 operators. When BRANCH_COST is low, do this only for C == 1. */
8646 if (temp && singleton && binary_op
8647 && (TREE_CODE (binary_op) == PLUS_EXPR
8648 || TREE_CODE (binary_op) == MINUS_EXPR
8649 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8650 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8651 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8652 : integer_onep (TREE_OPERAND (binary_op, 1)))
8653 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8657 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8658 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8659 ? addv_optab : add_optab)
8660 : TREE_CODE (binary_op) == MINUS_EXPR
8661 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8662 ? subv_optab : sub_optab)
8663 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8666 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8667 if (singleton == TREE_OPERAND (exp, 1))
8668 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8670 cond = TREE_OPERAND (exp, 0);
8672 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8674 mode, BRANCH_COST <= 1);
8676 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8677 result = expand_shift (LSHIFT_EXPR, mode, result,
8678 build_int_2 (tree_log2
8682 (safe_from_p (temp, singleton, 1)
8683 ? temp : NULL_RTX), 0);
8687 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8688 return expand_binop (mode, boptab, op1, result, temp,
8689 unsignedp, OPTAB_LIB_WIDEN);
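/* For example, with C == 4 this emits

     x ? a + 4 : a    as    a + ((x != 0) << 2)

   replacing a conditional branch with a store-flag and a shift. */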
8693 do_pending_stack_adjust ();
8695 op0 = gen_label_rtx ();
8697 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8701 /* If the target conflicts with the other operand of the
8702 binary op, we can't use it. Also, we can't use the target
8703 if it is a hard register, because evaluating the condition
8704 might clobber it. */
8706 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8708 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8709 temp = gen_reg_rtx (mode);
8710 store_expr (singleton, temp,
8711 modifier == EXPAND_STACK_PARM ? 2 : 0);
8714 expand_expr (singleton,
8715 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8716 if (singleton == TREE_OPERAND (exp, 1))
8717 jumpif (TREE_OPERAND (exp, 0), op0);
8719 jumpifnot (TREE_OPERAND (exp, 0), op0);
8721 start_cleanup_deferral ();
8722 if (binary_op && temp == 0)
8723 /* Just touch the other operand. */
8724 expand_expr (TREE_OPERAND (binary_op, 1),
8725 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8727 store_expr (build (TREE_CODE (binary_op), type,
8728 make_tree (type, temp),
8729 TREE_OPERAND (binary_op, 1)),
8730 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8732 store_expr (build1 (TREE_CODE (unary_op), type,
8733 make_tree (type, temp)),
8734 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8737 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8738 comparison operator. If we have one of these cases, set the
8739 output to A, branch on A (cse will merge these two references),
8740 then set the output to FOO. */
8742 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8743 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8744 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8745 TREE_OPERAND (exp, 1), 0)
8746 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8747 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8748 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8751 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8752 temp = gen_reg_rtx (mode);
8753 store_expr (TREE_OPERAND (exp, 1), temp,
8754 modifier == EXPAND_STACK_PARM ? 2 : 0);
8755 jumpif (TREE_OPERAND (exp, 0), op0);
8757 start_cleanup_deferral ();
8758 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8759 store_expr (TREE_OPERAND (exp, 2), temp,
8760 modifier == EXPAND_STACK_PARM ? 2 : 0);
8762 expand_expr (TREE_OPERAND (exp, 2),
8763 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8767 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8768 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8769 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8770 TREE_OPERAND (exp, 2), 0)
8771 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8772 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8773 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8776 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8777 temp = gen_reg_rtx (mode);
8778 store_expr (TREE_OPERAND (exp, 2), temp,
8779 modifier == EXPAND_STACK_PARM ? 2 : 0);
8780 jumpifnot (TREE_OPERAND (exp, 0), op0);
8782 start_cleanup_deferral ();
8783 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8784 store_expr (TREE_OPERAND (exp, 1), temp,
8785 modifier == EXPAND_STACK_PARM ? 2 : 0);
8787 expand_expr (TREE_OPERAND (exp, 1),
8788 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8793 op1 = gen_label_rtx ();
8794 jumpifnot (TREE_OPERAND (exp, 0), op0);
8796 start_cleanup_deferral ();
8798 /* One branch of the cond can be void, if it never returns. For
8799 example A ? throw : E */
8801 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8802 store_expr (TREE_OPERAND (exp, 1), temp,
8803 modifier == EXPAND_STACK_PARM ? 2 : 0);
8805 expand_expr (TREE_OPERAND (exp, 1),
8806 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8807 end_cleanup_deferral ();
8809 emit_jump_insn (gen_jump (op1));
8812 start_cleanup_deferral ();
8814 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8815 store_expr (TREE_OPERAND (exp, 2), temp,
8816 modifier == EXPAND_STACK_PARM ? 2 : 0);
8818 expand_expr (TREE_OPERAND (exp, 2),
8819 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8822 end_cleanup_deferral ();
8833 /* Something needs to be initialized, but we didn't know
8834 where that thing was when building the tree. For example,
8835 it could be the return value of a function, or a parameter
8836 to a function which is laid out on the stack, or a temporary
8837 variable which must be passed by reference.
8839 We guarantee that the expression will either be constructed
8840 or copied into our original target. */
8842 tree slot = TREE_OPERAND (exp, 0);
8843 tree cleanups = NULL_TREE;
8846 if (TREE_CODE (slot) != VAR_DECL)
8850 target = original_target;
8852 /* Set this here so that if we get a target that refers to a
8853 register variable that's already been used, put_reg_into_stack
8854 knows that it should fix up those uses. */
8855 TREE_USED (slot) = 1;
8859 if (DECL_RTL_SET_P (slot))
8861 target = DECL_RTL (slot);
8862 /* If we have already expanded the slot, don't do it again. */
8864 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8869 target = assign_temp (type, 2, 0, 1);
8870 SET_DECL_RTL (slot, target);
8871 if (TREE_ADDRESSABLE (slot))
8872 put_var_into_stack (slot, /*rescan=*/false);
8874 /* Since SLOT is not known to the called function
8875 to belong to its stack frame, we must build an explicit
8876 cleanup. This case occurs when we must build up a reference
8877 to pass the reference as an argument. In this case,
8878 it is very likely that such a reference need not be built here. */
8881 if (TREE_OPERAND (exp, 2) == 0)
8882 TREE_OPERAND (exp, 2)
8883 = lang_hooks.maybe_build_cleanup (slot);
8884 cleanups = TREE_OPERAND (exp, 2);
8889 /* This case does occur when expanding a parameter which
8890 needs to be constructed on the stack. The target
8891 is the actual stack address that we want to initialize.
8892 The function we call will perform the cleanup in this case. */
8894 /* If we have already assigned it space, use that space,
8895 not the target that we were passed in, as our target
8896 parameter is only a hint. */
8897 if (DECL_RTL_SET_P (slot))
8899 target = DECL_RTL (slot);
8900 /* If we have already expanded the slot, don't do it again. */
8902 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8907 SET_DECL_RTL (slot, target);
8908 /* If we must have an addressable slot, then make sure that
8909 the RTL that we just stored in slot is OK. */
8910 if (TREE_ADDRESSABLE (slot))
8911 put_var_into_stack (slot, /*rescan=*/true);
8915 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8916 /* Mark it as expanded. */
8917 TREE_OPERAND (exp, 1) = NULL_TREE;
8919 if (VOID_TYPE_P (TREE_TYPE (exp1)))
8920 /* If the initializer is void, just expand it; it will initialize
8921 the object directly. */
8922 expand_expr (exp1, const0_rtx, VOIDmode, 0);
8924 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8926 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8933 tree lhs = TREE_OPERAND (exp, 0);
8934 tree rhs = TREE_OPERAND (exp, 1);
8936 temp = expand_assignment (lhs, rhs, ! ignore);
8942 /* If lhs is complex, expand calls in rhs before computing it.
8943 That's so we don't compute a pointer and save it over a
8944 call. If lhs is simple, compute it first so we can give it
8945 as a target if the rhs is just a call. This avoids an
8946 extra temp and copy, and prevents a partial-subsumption
8947 that makes bad code. Actually we could treat
8948 component_ref's of vars like vars. */
8950 tree lhs = TREE_OPERAND (exp, 0);
8951 tree rhs = TREE_OPERAND (exp, 1);
8955 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8956 of size 1. In this case, (unless we need the result of the
8957 assignment) we can do this more efficiently with a
8958 test followed by an assignment, if necessary.
8960 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8961 things change so we do, this code should be enhanced to support it. */
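/* For example, with one-bit fields B and D,

     a.b |= c.d;    is emitted as    if (c.d) a.b = 1;

   and the BIT_AND_EXPR case as    if (!c.d) a.b = 0;

   jumping around the store instead of extracting, combining, and
   reinserting the bit. */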
8964 && TREE_CODE (lhs) == COMPONENT_REF
8965 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8966 || TREE_CODE (rhs) == BIT_AND_EXPR)
8967 && TREE_OPERAND (rhs, 0) == lhs
8968 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8969 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8970 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8972 rtx label = gen_label_rtx ();
8974 do_jump (TREE_OPERAND (rhs, 1),
8975 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8976 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8977 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8978 (TREE_CODE (rhs) == BIT_IOR_EXPR
8980 : integer_zero_node)),
8982 do_pending_stack_adjust ();
8987 temp = expand_assignment (lhs, rhs, ! ignore);
8993 if (!TREE_OPERAND (exp, 0))
8994 expand_null_return ();
8996 expand_return (TREE_OPERAND (exp, 0));
8999 case PREINCREMENT_EXPR:
9000 case PREDECREMENT_EXPR:
9001 return expand_increment (exp, 0, ignore);
9003 case POSTINCREMENT_EXPR:
9004 case POSTDECREMENT_EXPR:
9005 /* Faster to treat as pre-increment if result is not used. */
9006 return expand_increment (exp, ! ignore, ignore);
9009 if (modifier == EXPAND_STACK_PARM)
9011 /* If we are taking the address of something erroneous, just use zero. */
9013 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9015 /* If we are taking the address of a constant and are at the
9016 top level, we have to use output_constant_def since we can't
9017 call force_const_mem at top level. */
9019 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9020 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9022 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
9025 /* We make sure to pass const0_rtx down if we came in with
9026 ignore set, to avoid doing the cleanups twice for something. */
9027 op0 = expand_expr (TREE_OPERAND (exp, 0),
9028 ignore ? const0_rtx : NULL_RTX, VOIDmode,
9029 (modifier == EXPAND_INITIALIZER
9030 ? modifier : EXPAND_CONST_ADDRESS));
9032 /* If we are going to ignore the result, OP0 will have been set
9033 to const0_rtx, so just return it. Don't get confused and
9034 think we are taking the address of the constant. */
9038 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9039 clever and return a REG when given a MEM. */
9040 op0 = protect_from_queue (op0, 1);
9042 /* We would like the object in memory. If it is a constant, we can
9043 have it be statically allocated into memory. For a non-constant,
9044 we need to allocate some memory and store the value into it. */
9046 if (CONSTANT_P (op0))
9047 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9049 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
9050 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9051 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
9053 /* If the operand is a SAVE_EXPR, we can deal with this by
9054 forcing the SAVE_EXPR into memory. */
9055 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9057 put_var_into_stack (TREE_OPERAND (exp, 0),
9059 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9063 /* If this object is in a register, it can't be BLKmode. */
9064 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9065 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9067 if (GET_CODE (op0) == PARALLEL)
9068 /* Handle calls that pass values in multiple
9069 non-contiguous locations. The Irix 6 ABI has examples of this. */
9071 emit_group_store (memloc, op0, inner_type,
9072 int_size_in_bytes (inner_type));
9074 emit_move_insn (memloc, op0);
9080 if (GET_CODE (op0) != MEM)
9083 mark_temp_addr_taken (op0);
9084 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9086 op0 = XEXP (op0, 0);
9087 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9088 op0 = convert_memory_address (ptr_mode, op0);
9092 /* If OP0 is not aligned at least as much as the type requires, we
9093 need to make a temporary, copy OP0 to it, and take the address of
9094 the temporary. We want to use the alignment of the type, not of
9095 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9096 the test for BLKmode means that can't happen. The test for
9097 BLKmode is because we never make mis-aligned MEMs with non-BLKmode modes.
9100 We don't need to do this at all if the machine doesn't have
9101 strict alignment. */
9102 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9103 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9105 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9107 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9110 if (TYPE_ALIGN_OK (inner_type))
9113 if (TREE_ADDRESSABLE (inner_type))
9115 /* We can't make a bitwise copy of this object, so fail. */
9116 error ("cannot take the address of an unaligned member");
9120 new = assign_stack_temp_for_type
9121 (TYPE_MODE (inner_type),
9122 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9123 : int_size_in_bytes (inner_type),
9124 1, build_qualified_type (inner_type,
9125 (TYPE_QUALS (inner_type)
9126 | TYPE_QUAL_CONST)));
9128 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9129 (modifier == EXPAND_STACK_PARM
9130 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
9135 op0 = force_operand (XEXP (op0, 0), target);
9140 && modifier != EXPAND_CONST_ADDRESS
9141 && modifier != EXPAND_INITIALIZER
9142 && modifier != EXPAND_SUM)
9143 op0 = force_reg (Pmode, op0);
9146 && ! REG_USERVAR_P (op0))
9147 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9149 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
9150 op0 = convert_memory_address (ptr_mode, op0);
9154 case ENTRY_VALUE_EXPR:
9157 /* COMPLEX type for Extended Pascal & Fortran */
9160 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9163 /* Get the rtx code of the operands. */
9164 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9165 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9168 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9172 /* Move the real (op0) and imaginary (op1) parts to their location. */
9173 emit_move_insn (gen_realpart (mode, target), op0);
9174 emit_move_insn (gen_imagpart (mode, target), op1);
9176 insns = get_insns ();
9179 /* Complex construction should appear as a single unit. */
9180 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9181 each with a separate pseudo as destination.
9182 It's not correct for flow to treat them as a unit. */
9183 if (GET_CODE (target) != CONCAT)
9184 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9192 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9193 return gen_realpart (mode, op0);
9196 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9197 return gen_imagpart (mode, op0);
9201 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9205 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9208 target = gen_reg_rtx (mode);
9212 /* Store the realpart and the negated imagpart to target. */
9213 emit_move_insn (gen_realpart (partmode, target),
9214 gen_realpart (partmode, op0));
9216 imag_t = gen_imagpart (partmode, target);
9217 temp = expand_unop (partmode,
9218 ! unsignedp && flag_trapv
9219 && (GET_MODE_CLASS(partmode) == MODE_INT)
9220 ? negv_optab : neg_optab,
9221 gen_imagpart (partmode, op0), imag_t, 0);
9223 emit_move_insn (imag_t, temp);
9225 insns = get_insns ();
9228 /* Conjugate should appear as a single unit.
9229 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9230 each with a separate pseudo as destination.
9231 It's not correct for flow to treat them as a unit. */
9232 if (GET_CODE (target) != CONCAT)
9233 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9241 expand_resx_expr (exp);
9244 case TRY_CATCH_EXPR:
9246 tree handler = TREE_OPERAND (exp, 1);
9248 expand_eh_region_start ();
9249 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9250 expand_eh_handler (handler);
9256 expand_start_catch (CATCH_TYPES (exp));
9257 expand_expr (CATCH_BODY (exp), const0_rtx, VOIDmode, 0);
9258 expand_end_catch ();
9261 case EH_FILTER_EXPR:
9262 /* Should have been handled in expand_eh_handler. */
9265 case TRY_FINALLY_EXPR:
9267 tree try_block = TREE_OPERAND (exp, 0);
9268 tree finally_block = TREE_OPERAND (exp, 1);
9270 if ((!optimize && lang_protect_cleanup_actions == NULL)
9271 || unsafe_for_reeval (finally_block) > 1)
9273 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9274 is not sufficient, so we cannot expand the block twice.
9275 Instead we play games with GOTO_SUBROUTINE_EXPR to let us
9276 expand the thing only once. */
9277 /* When not optimizing, we go ahead with this form since
9278 (1) user breakpoints operate more predictably without
9279 code duplication, and
9280 (2) we're not running any of the global optimizers
9281 that would explode in time/space with the highly
9282 connected CFG created by the indirect branching. */
9284 rtx finally_label = gen_label_rtx ();
9285 rtx done_label = gen_label_rtx ();
9286 rtx return_link = gen_reg_rtx (Pmode);
9287 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9288 (tree) finally_label, (tree) return_link);
9289 TREE_SIDE_EFFECTS (cleanup) = 1;
9291 /* Start a new binding layer that will keep track of all cleanup
9292 actions to be performed. */
9293 expand_start_bindings (2);
9294 target_temp_slot_level = temp_slot_level;
9296 expand_decl_cleanup (NULL_TREE, cleanup);
9297 op0 = expand_expr (try_block, target, tmode, modifier);
9299 preserve_temp_slots (op0);
9300 expand_end_bindings (NULL_TREE, 0, 0);
9301 emit_jump (done_label);
9302 emit_label (finally_label);
9303 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9304 emit_indirect_jump (return_link);
9305 emit_label (done_label);
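/* Schematically, using GNU C labels-as-values, the sequence emitted
   above is

       <try_block>
       return_link = &&resume; goto finally;
     resume:
       goto done;
     finally:
       <finally_block>
       goto *return_link;
     done:

   so FINALLY_BLOCK is expanded only once yet is reachable from every
   cleanup point via RETURN_LINK. */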
9309 expand_start_bindings (2);
9310 target_temp_slot_level = temp_slot_level;
9312 expand_decl_cleanup (NULL_TREE, finally_block);
9313 op0 = expand_expr (try_block, target, tmode, modifier);
9315 preserve_temp_slots (op0);
9316 expand_end_bindings (NULL_TREE, 0, 0);
9322 case GOTO_SUBROUTINE_EXPR:
9324 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9325 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9326 rtx return_address = gen_label_rtx ();
9327 emit_move_insn (return_link,
9328 gen_rtx_LABEL_REF (Pmode, return_address));
9330 emit_label (return_address);
9335 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9338 return get_exception_pointer (cfun);
9341 return get_exception_filter (cfun);
9344 /* Function descriptors are not valid except as
9345 initialization constants, and should not be expanded. */
9349 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
9351 if (SWITCH_BODY (exp))
9352 expand_expr_stmt (SWITCH_BODY (exp));
9353 if (SWITCH_LABELS (exp))
9356 tree vec = SWITCH_LABELS (exp);
9357 size_t i, n = TREE_VEC_LENGTH (vec);
9359 for (i = 0; i < n; ++i)
9361 tree elt = TREE_VEC_ELT (vec, i);
9362 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
9363 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
9364 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
9366 tree case_low = CASE_LOW (elt);
9367 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
9368 if (case_low && case_high)
9370 /* Case label is less than minimum for type. */
9371 if ((tree_int_cst_compare (case_low, min_value) < 0)
9372 && (tree_int_cst_compare (case_high, min_value) < 0))
9374 warning ("case label value %d is less than minimum value for type",
9375 (int) TREE_INT_CST_LOW (case_low));
9379 /* Case value is greater than maximum for type. */
9380 if ((tree_int_cst_compare (case_low, max_value) > 0)
9381 && (tree_int_cst_compare (case_high, max_value) > 0))
9383 warning ("case label value %d exceeds maximum value for type",
9384 (int) TREE_INT_CST_LOW (case_high));
9388 /* Saturate lower case label value to minimum. */
9389 if ((tree_int_cst_compare (case_high, min_value) >= 0)
9390 && (tree_int_cst_compare (case_low, min_value) < 0))
9392 warning ("lower value %d in case label range less than minimum value for type",
9393 TREE_INT_CST (case_low));
9394 case_low = min_value;
9397 /* Saturate upper case label value to maximum. */
9398 if ((tree_int_cst_compare (case_low, max_value) <= 0)
9399 && (tree_int_cst_compare (case_high, max_value) > 0))
9401 warning ("upper value %d in case label range exceeds maximum value for type",
9402 (int) TREE_INT_CST_LOW (case_high));
9403 case_high = max_value;
9407 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
9412 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
9416 expand_label (TREE_OPERAND (exp, 0));
9419 case CASE_LABEL_EXPR:
9422 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
9430 expand_asm_expr (exp);
9434 return lang_hooks.expand_expr (exp, original_target, tmode,
9438 /* Here to do an ordinary binary operator, generating an instruction
9439 from the optab already placed in `this_optab'. */
9441 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9442 subtarget, &op0, &op1, 0);
9444 if (modifier == EXPAND_STACK_PARM)
9446 temp = expand_binop (mode, this_optab, op0, op1, target,
9447 unsignedp, OPTAB_LIB_WIDEN);
9453 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9454 when applied to the address of EXP produces an address known to be
9455 aligned more than BIGGEST_ALIGNMENT. */
9458 is_aligning_offset (tree offset, tree exp)
9460 /* Strip off any conversions. */
9461 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9462 || TREE_CODE (offset) == NOP_EXPR
9463 || TREE_CODE (offset) == CONVERT_EXPR)
9464 offset = TREE_OPERAND (offset, 0);
9466 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9467 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9468 if (TREE_CODE (offset) != BIT_AND_EXPR
9469 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9470 || compare_tree_int (TREE_OPERAND (offset, 1),
9471 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9472 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9475 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9476 It must be NEGATE_EXPR. Then strip any more conversions. */
9477 offset = TREE_OPERAND (offset, 0);
9478 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9479 || TREE_CODE (offset) == NOP_EXPR
9480 || TREE_CODE (offset) == CONVERT_EXPR)
9481 offset = TREE_OPERAND (offset, 0);
9483 if (TREE_CODE (offset) != NEGATE_EXPR)
9486 offset = TREE_OPERAND (offset, 0);
9487 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9488 || TREE_CODE (offset) == NOP_EXPR
9489 || TREE_CODE (offset) == CONVERT_EXPR)
9490 offset = TREE_OPERAND (offset, 0);
9492 /* This must now be the address of EXP. */
9493 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
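/* The pattern accepted above is the one generated for alignment code
   such as

     p + ((- (intptr_t) p) & (ALIGN - 1))

   where the BIT_AND_EXPR of the negated address with ALIGN - 1 is
   exactly the offset that rounds the address of EXP up to a multiple
   of ALIGN. */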
9496 /* Return the tree node if an ARG corresponds to a string constant or zero
9497 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9498 in bytes within the string that ARG is accessing. The type of the
9499 offset will be `sizetype'. */
9502 string_constant (tree arg, tree *ptr_offset)
9506 if (TREE_CODE (arg) == ADDR_EXPR
9507 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9509 *ptr_offset = size_zero_node;
9510 return TREE_OPERAND (arg, 0);
9512 if (TREE_CODE (arg) == ADDR_EXPR
9513 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9514 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9516 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9517 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9519 else if (TREE_CODE (arg) == PLUS_EXPR)
9521 tree arg0 = TREE_OPERAND (arg, 0);
9522 tree arg1 = TREE_OPERAND (arg, 1);
9527 if (TREE_CODE (arg0) == ADDR_EXPR
9528 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9530 *ptr_offset = convert (sizetype, arg1);
9531 return TREE_OPERAND (arg0, 0);
9533 else if (TREE_CODE (arg1) == ADDR_EXPR
9534 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9536 *ptr_offset = convert (sizetype, arg0);
9537 return TREE_OPERAND (arg1, 0);
9544 /* Expand code for a post- or pre- increment or decrement
9545 and return the RTX for the result.
9546 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9549 expand_increment (tree exp, int post, int ignore)
9553 tree incremented = TREE_OPERAND (exp, 0);
9554 optab this_optab = add_optab;
9556 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9557 int op0_is_copy = 0;
9558 int single_insn = 0;
9559 /* 1 means we can't store into OP0 directly,
9560 because it is a subreg narrower than a word,
9561 and we don't dare clobber the rest of the word. */
9564 /* Stabilize any component ref that might need to be
9565 evaluated more than once below. */
9567 || TREE_CODE (incremented) == BIT_FIELD_REF
9568 || (TREE_CODE (incremented) == COMPONENT_REF
9569 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9570 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9571 incremented = stabilize_reference (incremented);
9572 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9573 ones into save exprs so that they don't accidentally get evaluated
9574 more than once by the code below. */
9575 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9576 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9577 incremented = save_expr (incremented);
9579 /* Compute the operands as RTX.
9580 Note whether OP0 is the actual lvalue or a copy of it:
9581 I believe it is a copy iff it is a register or subreg
9582 and insns were generated in computing it. */
9584 temp = get_last_insn ();
9585 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9587 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9588 in place but instead must do sign- or zero-extension during assignment,
9589 so we copy it into a new register and let the code below use it as a copy.
9592 Note that we can safely modify this SUBREG since it is known not to be
9593 shared (it was made by the expand_expr call above). */
9595 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9598 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9602 else if (GET_CODE (op0) == SUBREG
9603 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9605 /* We cannot increment this SUBREG in place. If we are
9606 post-incrementing, get a copy of the old value. Otherwise,
9607 just mark that we cannot increment in place. */
9609 op0 = copy_to_reg (op0);
9614 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
9615 && temp != get_last_insn ());
9616 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9618 /* Decide whether incrementing or decrementing. */
9619 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9620 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9621 this_optab = sub_optab;
9623 /* Convert decrement by a constant into a negative increment. */
9624 if (this_optab == sub_optab
9625 && GET_CODE (op1) == CONST_INT)
9627 op1 = GEN_INT (-INTVAL (op1));
9628 this_optab = add_optab;
9631 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9632 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9634 /* For a preincrement, see if we can do this with a single instruction. */
9637 icode = (int) this_optab->handlers[(int) mode].insn_code;
9638 if (icode != (int) CODE_FOR_nothing
9639 /* Make sure that OP0 is valid for operands 0 and 1
9640 of the insn we want to queue. */
9641 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9642 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9643 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9647 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9648 then we cannot just increment OP0. We must therefore contrive to
9649 increment the original value. Then, for postincrement, we can return
9650 OP0 since it is a copy of the old value. For preincrement, expand here
9651 unless we can do it with a single insn.
9653 Likewise if storing directly into OP0 would clobber high bits
9654 we need to preserve (bad_subreg). */
9655 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9657 /* This is the easiest way to increment the value wherever it is.
9658 Problems with multiple evaluation of INCREMENTED are prevented
9659 because either (1) it is a component_ref or preincrement,
9660 in which case it was stabilized above, or (2) it is an array_ref
9661 with constant index in an array in a register, which is
9662 safe to reevaluate. */
9663 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9664 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9665 ? MINUS_EXPR : PLUS_EXPR),
9668 TREE_OPERAND (exp, 1));
9670 while (TREE_CODE (incremented) == NOP_EXPR
9671 || TREE_CODE (incremented) == CONVERT_EXPR)
9673 newexp = convert (TREE_TYPE (incremented), newexp);
9674 incremented = TREE_OPERAND (incremented, 0);
9677 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9678 return post ? op0 : temp;
9683 /* We have a true reference to the value in OP0.
9684 If there is an insn to add or subtract in this mode, queue it.
9685 Queuing the increment insn avoids the register shuffling
9686 that often results if we must increment now and first save
9687 the old value for subsequent use. */
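/* For example, in

     y = x++;

   the old value of X can be used for the store to Y directly, and the
   queued X = X + 1 is emitted at the next emit_queue point, avoiding
   a copy of the old value. */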
9689 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9690 op0 = stabilize (op0);
9693 icode = (int) this_optab->handlers[(int) mode].insn_code;
9694 if (icode != (int) CODE_FOR_nothing
9695 /* Make sure that OP0 is valid for operands 0 and 1
9696 of the insn we want to queue. */
9697 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9698 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9700 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9701 op1 = force_reg (mode, op1);
9703 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9705 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9707 rtx addr = (general_operand (XEXP (op0, 0), mode)
9708 ? force_reg (Pmode, XEXP (op0, 0))
9709 : copy_to_reg (XEXP (op0, 0)));
9712 op0 = replace_equiv_address (op0, addr);
9713 temp = force_reg (GET_MODE (op0), op0);
9714 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9715 op1 = force_reg (mode, op1);
9717 /* The increment queue is LIFO, thus we have to `queue'
9718 the instructions in reverse order. */
9719 enqueue_insn (op0, gen_move_insn (op0, temp));
9720 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9725 /* Preincrement, or we can't increment with one simple insn. */
9727 /* Save a copy of the value before inc or dec, to return it later. */
9728 temp = value = copy_to_reg (op0);
9730 /* Arrange to return the incremented value. */
9731 /* Copy the rtx because expand_binop will protect from the queue,
9732 and the results of that would be invalid for us to return
9733 if our caller does emit_queue before using our result. */
9734 temp = copy_rtx (value = op0);
9736 /* Increment however we can. */
9737 op1 = expand_binop (mode, this_optab, value, op1, op0,
9738 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9740 /* Make sure the value is stored into OP0. */
9742 emit_move_insn (op0, op1);
9747 /* Generate code to calculate EXP using a store-flag instruction
9748 and return an rtx for the result. EXP is either a comparison
9749 or a TRUTH_NOT_EXPR whose operand is a comparison.
9751 If TARGET is nonzero, store the result there if convenient.
9753 If ONLY_CHEAP is nonzero, only do this if it is likely to be very cheap.
9756 Return zero if there is no suitable set-flag instruction
9757 available on this machine.
9759 Once expand_expr has been called on the arguments of the comparison,
9760 we are committed to doing the store flag, since it is not safe to
9761 re-evaluate the expression. We emit the store-flag insn by calling
9762 emit_store_flag, but only expand the arguments if we have a reason
9763 to believe that emit_store_flag will be successful. If we think that
9764 it will, but it isn't, we have to simulate the store-flag with a
9765 set/jump/set sequence. */
9768 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9771 tree arg0, arg1, type;
9773 enum machine_mode operand_mode;
9777 enum insn_code icode;
9778 rtx subtarget = target;
9781 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9782 result at the end. We can't simply invert the test since it would
9783 have already been inverted if it were valid. This case occurs for
9784 some floating-point comparisons. */
9786 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9787 invert = 1, exp = TREE_OPERAND (exp, 0);
9789 arg0 = TREE_OPERAND (exp, 0);
9790 arg1 = TREE_OPERAND (exp, 1);
9792 /* Don't crash if the comparison was erroneous. */
9793 if (arg0 == error_mark_node || arg1 == error_mark_node)
9796 type = TREE_TYPE (arg0);
9797 operand_mode = TYPE_MODE (type);
9798 unsignedp = TYPE_UNSIGNED (type);
9800 /* We won't bother with BLKmode store-flag operations because it would mean
9801 passing a lot of information to emit_store_flag. */
9802 if (operand_mode == BLKmode)
9805 /* We won't bother with store-flag operations involving function pointers
9806 when function pointers must be canonicalized before comparisons. */
9807 #ifdef HAVE_canonicalize_funcptr_for_compare
9808 if (HAVE_canonicalize_funcptr_for_compare
9809 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9810 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9812 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9813 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9814 == FUNCTION_TYPE))))
9821 /* Get the rtx comparison code to use. We know that EXP is a comparison
9822 operation of some type. Some comparisons against 1 and -1 can be
9823 converted to comparisons with zero. Do so here so that the tests
9824 below will be aware that we have a comparison with zero. These
9825 tests will not catch constants in the first operand, but constants
9826 are rarely passed as the first operand. */
9828 switch (TREE_CODE (exp))
9837 if (integer_onep (arg1))
9838 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9840 code = unsignedp ? LTU : LT;
9843 if (! unsignedp && integer_all_onesp (arg1))
9844 arg1 = integer_zero_node, code = LT;
9846 code = unsignedp ? LEU : LE;
9849 if (! unsignedp && integer_all_onesp (arg1))
9850 arg1 = integer_zero_node, code = GE;
9852 code = unsignedp ? GTU : GT;
9855 if (integer_onep (arg1))
9856 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9858 code = unsignedp ? GEU : GE;
9861 case UNORDERED_EXPR:
9890 /* Put a constant second. */
9891 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9893 tem = arg0; arg0 = arg1; arg1 = tem;
9894 code = swap_condition (code);
9897 /* If this is an equality or inequality test of a single bit, we can
9898 do this by shifting the bit being tested to the low-order bit and
9899 masking the result with the constant 1. If the condition was EQ,
9900 we xor it with 1. This does not require an scc insn and is faster
9901 than an scc insn even if we have it.
9903 The code to make this transformation was moved into fold_single_bit_test,
9904 so we just call into the folder and expand its result. */
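/* For example,

     (x & 8) != 0    folds to    (x >> 3) & 1

   and the EQ form additionally xors the result with 1; no scc
   instruction is needed. */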
9906 if ((code == NE || code == EQ)
9907 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9908 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9910 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9911 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9913 target, VOIDmode, EXPAND_NORMAL);
9916 /* Now see if we are likely to be able to do this. Return if not. */
9917 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9920 icode = setcc_gen_code[(int) code];
9921 if (icode == CODE_FOR_nothing
9922 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9924 /* We can only do this if it is one of the special cases that
9925 can be handled without an scc insn. */
9926 if ((code == LT && integer_zerop (arg1))
9927 || (! only_cheap && code == GE && integer_zerop (arg1)))
9929 else if (BRANCH_COST >= 0
9930 && ! only_cheap && (code == NE || code == EQ)
9931 && TREE_CODE (type) != REAL_TYPE
9932 && ((abs_optab->handlers[(int) operand_mode].insn_code
9933 != CODE_FOR_nothing)
9934 || (ffs_optab->handlers[(int) operand_mode].insn_code
9935 != CODE_FOR_nothing)))
9941 if (! get_subtarget (target)
9942 || GET_MODE (subtarget) != operand_mode)
9945 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9948 target = gen_reg_rtx (mode);
9950 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9951 because, if emit_store_flag does anything, it will succeed and
9952 OP0 and OP1 will not be used subsequently. */
9954 result = emit_store_flag (target, code,
9955 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9956 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9957 operand_mode, unsignedp, 1);
9962 result = expand_binop (mode, xor_optab, result, const1_rtx,
9963 result, 0, OPTAB_LIB_WIDEN);
9967 /* If this failed, we have to do this with set/compare/jump/set code. */
9969 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9970 target = gen_reg_rtx (GET_MODE (target));
9972 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9973 result = compare_from_rtx (op0, op1, code, unsignedp,
9974 operand_mode, NULL_RTX);
9975 if (GET_CODE (result) == CONST_INT)
9976 return (((result == const0_rtx && ! invert)
9977 || (result != const0_rtx && invert))
9978 ? const0_rtx : const1_rtx);
9980 /* The code of RESULT may not match CODE if compare_from_rtx
9981 decided to swap its operands and reverse the original code.
9983 We know that compare_from_rtx returns either a CONST_INT or
9984 a new comparison code, so it is safe to just extract the
9985 code from RESULT. */
9986 code = GET_CODE (result);
9988 label = gen_label_rtx ();
9989 if (bcc_gen_fctn[(int) code] == 0)
9992 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9993 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
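/* That is, the fallback sequence being emitted is

     target = 1;
     if (op0 <code> op1) goto label;
     target = 0;
   label:

   with the two constants swapped when INVERT is set. */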
10000 /* Stubs in case we haven't got a casesi insn. */
10001 #ifndef HAVE_casesi
10002 # define HAVE_casesi 0
10003 # define gen_casesi(a, b, c, d, e) (0)
10004 # define CODE_FOR_casesi CODE_FOR_nothing
10007 /* If the machine does not have a case insn that compares the bounds,
10008 this means extra overhead for dispatch tables, which raises the
10009 threshold for using them. */
10010 #ifndef CASE_VALUES_THRESHOLD
10011 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10012 #endif /* CASE_VALUES_THRESHOLD */
10015 case_values_threshold (void)
10017 return CASE_VALUES_THRESHOLD;
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */

int
try_casesi (tree index_type, tree index_expr, tree minval, tree range,
	    rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert (lang_hooks.types.type_for_size
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TYPE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
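
/* Editorial illustration, not part of GCC: what the casesi expansion
   amounts to in source form.  This uses the GNU C computed-goto
   extension; the labels and case values are made up:  */
#if 0
#include <stdio.h>

static const char *
dispatch (unsigned int i)
{
  /* Jump table for cases 10, 11 and 12.  */
  static const void *const table[] = { &&case10, &&case11, &&case12 };

  /* Subtract the minimum value, then one unsigned compare against the
     range checks both bounds (out-of-range values wrap around).  */
  if (i - 10 > 2u)
    goto use_default;
  goto *table[i - 10];

 case10: return "ten";
 case11: return "eleven";
 case12: return "twelve";
 use_default: return "other";
}

int
main (void)
{
  printf ("%s %s\n", dispatch (11), dispatch (42));
  return 0;
}
#endif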
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
	      rtx default_label)
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && !REG_P (index))
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  MEM_NOTRAP_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
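
/* Editorial illustration, not part of GCC: the single GTU comparison
   emitted above is the classic unsigned range-check trick.  After the
   lower bound is subtracted, values below it wrap to large unsigned
   numbers, so one unsigned compare replaces a pair of signed checks.
   Standalone demonstration:  */
#if 0
#include <assert.h>

static int
in_range (int x, int lo, int hi)
{
  /* Equivalent to (lo <= x && x <= hi), assuming hi - lo fits.  */
  return (unsigned int) x - (unsigned int) lo
	 <= (unsigned int) hi - (unsigned int) lo;
}

int
main (void)
{
  assert (in_range (5, 3, 9));
  assert (in_range (3, 3, 9) && in_range (9, 3, 9));
  assert (!in_range (2, 3, 9));
  assert (!in_range (10, 3, 9));
  return 0;
}
#endif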
int
try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
	       rtx table_label, rtx default_label)
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TYPE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Return nonzero if MODE is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (enum machine_mode mode)
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but we have V2DI, but this is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate it with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
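
/* Editorial illustration, not part of GCC: the fallback above accepts a
   vector mode whenever its inner mode can be moved, because a wide
   vector move can be emulated piecewise.  Plain C sketch with a
   hypothetical two-element stand-in for V2DI:  */
#if 0
typedef struct { long long elt[2]; } v2di_emulated;

static void
move_v2di (v2di_emulated *dst, const v2di_emulated *src)
{
  /* No V2DI move insn needed: two inner-mode (DImode) moves suffice.  */
  dst->elt[0] = src->elt[0];
  dst->elt[1] = src->elt[1];
}
#endif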
/* Return a CONST_VECTOR rtx for a VECTOR_CST tree.  */

static rtx
const_vector_from_tree (tree exp)
{
  rtvec v;
  int units, i;
  tree link, elt;
  enum machine_mode inner, mode;

  mode = TYPE_MODE (TREE_TYPE (exp));

  if (initializer_zerop (exp))
    return CONST0_RTX (mode);

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  v = rtvec_alloc (units);

  link = TREE_VECTOR_CST_ELTS (exp);
  for (i = 0; link; link = TREE_CHAIN (link), ++i)
    {
      elt = TREE_VALUE (link);

      if (TREE_CODE (elt) == REAL_CST)
	RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
							 inner);
      else
	RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
					       TREE_INT_CST_HIGH (elt),
					       inner);
    }

  /* Initialize remaining elements to 0.  */
  for (; i < units; ++i)
    RTVEC_ELT (v, i) = CONST0_RTX (inner);

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
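
/* Editorial illustration, not part of GCC: the trailing-element loop
   above behaves like C aggregate initialization, where unmentioned
   elements are implicitly zero, just as a short VECTOR_CST chain leaves
   the remaining vector lanes as CONST0_RTX:  */
#if 0
static const int four_lanes[4] = { 7, 8 };  /* lanes 2 and 3 become 0 */
#endif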

#include "gt-expr.h"