/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
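
/* Worked example (hypothetical target numbers, since MOVE_MAX and
   MOVE_RATIO are target macros): with MOVE_MAX == 4 and MOVE_RATIO == 3,
   a word-aligned 8-byte copy costs two SImode moves, so
   MOVE_BY_PIECES_P (8, 32) holds and the copy is expanded inline,
   while a 16-byte copy costs four moves and falls back to a movmem
   pattern or a libcall.  A caller consults the predicate like this:

     if (GET_CODE (size) == CONST_INT
	 && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align, 0);
*/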
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

init_expr_once (void)
  enum machine_mode mode;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	    if (! HARD_REGNO_MODE_OK (regno, mode))

	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
/* This is run at the start of compiling a function.  */

  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));

/* Small sanity check that the queue is empty at the end of a function.  */

finish_expr_for_function (void)

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

enqueue_insn (rtx var, rtx body)
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
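
/* A minimal sketch of queueing a post-increment (hypothetical caller;
   VAR is a register rtx):

     rtx body = gen_rtx_SET (VOIDmode, var,
			     gen_rtx_PLUS (GET_MODE (var), var, const1_rtx));
     rtx q = enqueue_insn (var, body);

   Q then stands for the pre-increment value of VAR until emit_queue
   flushes the pending chain.  */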
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
protect_from_queue (rtx x, int modify)
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
#endif

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  rtx temp = gen_reg_rtx (GET_MODE (x));

	  emit_insn_before (gen_move_insn (temp, new),

      /* Copy the address into a pseudo, so that the returned value
	 remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
  else if (code == PLUS || code == MULT)
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
  return QUEUED_COPY (x);
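
/* The intended usage pattern, sketched (hypothetical expansion code;
   the trailing annotations are not part of the calls):

     x = protect_from_queue (x, 0);	...  read access
     y = protect_from_queue (y, 1);	...  y will be stored into
     emit_insn (gen_move_insn (y, x));	...  use the results at once
     emit_queue ();

   Holding either protected rtx across an intervening emit_queue would
   reintroduce the hazard described above.  */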
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

queued_subexp_p (rtx x)
  enum rtx_code code = GET_CODE (x);
      return queued_subexp_p (XEXP (x, 0));
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
/* Retrieve a mark on the queue.  */

  return pending_chain;

/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

emit_insns_enqueued_after_mark (rtx mark)

  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))

  while ((p = pending_chain) != mark)
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	  QUEUED_INSN (p) = body;

#ifdef ENABLE_CHECKING
#endif
	  QUEUED_INSN (p) = emit_insn (body);

      pending_chain = QUEUED_NEXT (p);

/* Perform all the pending incrementations.  */

  emit_insns_enqueued_after_mark (NULL_RTX);
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

convert_move (rtx to, rtx from, int unsignedp)
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;
  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
      emit_move_insn (to, from);

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      /* This conversion is not implemented yet.  */

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
      insns = get_insns ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,

  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
      if (to_mode == full_mode)

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	  if (reg_overlap_mentioned_p (to, from))
	    from = force_reg (from_mode, from);
	  emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);

      /* No special multiword conversion insn; do it by hand.  */

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
	fill_value = const0_rtx;
	  && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	  && STORE_FLAG_VALUE == -1)
	  emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
	  fill_value = gen_reg_rtx (word_mode);
	  emit_insn (gen_slt (fill_value));
	    = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
			    size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
	  fill_value = convert_to_mode (word_mode, fill_value, 1);

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);

      insns = get_insns ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
	  && ! MEM_VOLATILE_P (from)
	  && direct_load[(int) to_mode]
	  && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
	  && ! MEM_VOLATILE_P (from)
	  && direct_load[(int) to_mode]
	  && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
	  emit_move_insn (to, tmp);

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);

  /* Mode combination is not recognized.  */
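
/* Example of use (a sketch, not code from this file): widening a QImode
   register FROM into a fresh SImode register, as for an unsigned char
   promoted to int:

     rtx to = gen_reg_rtx (SImode);
     convert_move (to, from, 1);

   The nonzero UNSIGNEDP selects ZERO_EXTEND; passing 0 emits a sign
   extension instead.  */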
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
  return convert_modes (mode, VOIDmode, x, unsignedp);

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);

      return gen_lowpart (mode, x);

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
      return simplify_gen_subreg (mode, x, oldmode, 0);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
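
/* Worked example of the constant paths above: converting the QImode
   constant -1 to SImode masks VAL down to the old width, giving 0xff.
   With UNSIGNEDP nonzero the result is gen_int_mode (255, SImode);
   with UNSIGNEDP zero the sign bit 0x80 is propagated again and the
   result is gen_int_mode (-1, SImode).  */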
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
  return MOVE_BY_PIECES_P (len, align);
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.from_addr = from_addr;
      to_addr = XEXP (to, 0);
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
#ifdef STACK_GROWS_DOWNWARD
#endif
  data.to_addr = to_addr;
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.explicit_inc_to = -1;
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.explicit_inc_to = 1;
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);

  /* The code above should have handled everything.  */

      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
      to1 = adjust_automodify_address (data.to, QImode, data.to_addr,

      to1 = adjust_address (data.to, QImode, data.offset);
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)

      if (mode == VOIDmode)

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
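
/* For instance, with MOVE_MAX == 4 (illustrative) and word-aligned
   operands, l == 11 costs 11/4 = 2 SImode moves with 3 bytes left,
   then 3/2 = 1 HImode move, then 1 QImode move: 4 insns in total.  */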
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
	data->offset -= size;

      if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
	to1 = adjust_address (data->to, mode, data->offset);

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT) size)));

	emit_insn ((*genfun) (to1, from1));
#ifdef PUSH_ROUNDING
	emit_single_push_insn (mode, from1, NULL);
#endif

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
    case BLOCK_OP_NORMAL:
      may_use_call = true;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
      if (INTVAL (size) == 0)

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
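
/* Typical use (a sketch): copying N constant bytes between two BLKmode
   MEMs during expansion of an assignment:

     retval = emit_block_move (target, source, GEN_INT (n),
			       BLOCK_OP_NORMAL);

   RETVAL is the address returned by memcpy when a libcall was emitted,
   and 0 otherwise.  */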
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

block_move_libcall_safe_for_call_parm (void)
  /* If arguments are pushed on the stack, then they're safe.  */

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
    CUMULATIVE_ARGS args_so_far;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	  rtx last = get_last_insn ();

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);

	      volatile_ok = save_volatile_ok;

	  delete_insns_since (last);

  volatile_ok = save_volatile_ok;
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

init_block_move_fn (const char *asmspec)
      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));

emit_block_move_libcall_fn (int for_call)
  static bool emitted_extern;

    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);

  return block_move_fn;
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);

  emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
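
/* The RTL emitted above corresponds roughly to this C loop (QImode
   element copies; a sketch, not literal output):

     iter = 0;
     goto cmp;
   top:
     ((char *) x_addr)[iter] = ((char *) y_addr)[iter];
     iter++;
   cmp:
     if (iter < size) goto top;
*/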
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
#ifdef HAVE_load_multiple

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
	delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

move_block_from_reg (int regno, rtx x, int nregs)

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
	  delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

gen_group_rtx (rtx orig)
  if (GET_CODE (orig) != PARALLEL)

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  for (; i < length; i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
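
/* Such a group might look like this for a value split across two
   registers (hypothetical hard register numbers):

     (parallel [(expr_list (reg:DI 4) (const_int 0))
		(expr_list (reg:DI 5) (const_int 8))])

   where each offset gives the byte position of the piece within the
   whole value.  gen_group_rtx replaces regs 4 and 5 above with fresh
   pseudos of the same modes.  */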
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
  if (GET_CODE (dst) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#endif
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);

      /* Optimize the access just a bit.  */
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
      else if (GET_CODE (src) == CONCAT)
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, ssize);
	  else if (bytepos == 0)
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	  int slen = GET_MODE_SIZE (GET_MODE (src));

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,

	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_2 (shift, 0), tmps[i], 0);

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

emit_group_move (rtx dst, rtx src)
  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
  if (GET_CODE (src) != PARALLEL)

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  if (GET_CODE (dst) == PARALLEL)
      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#endif
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_2 (shift, 0), tmps[i], 0);
	  bytelen = ssize - bytepos;

      if (GET_CODE (dst) == CONCAT)
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	  else if (bytepos == 0 && XVECLEN (src, 0))
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),

      /* Optimize the access just a bit.  */
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
2086 /* Generate code to copy a BLKmode object of TYPE out of a
2087 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2088 is null, a stack temporary is created. TGTBLK is returned.
2090 The purpose of this routine is to handle functions that return
2091 BLKmode structures in registers. Some machines (the PA for example)
2092 want to return all small structures in registers regardless of the
2093 structure's alignment. */
2096 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2098 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2101 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2105 tgtblk = assign_temp (build_qualified_type (type,
2107 | TYPE_QUAL_CONST)),
2109 preserve_temp_slots (tgtblk);
2112 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2113 into a new pseudo which is a full word. */
2115 if (GET_MODE (srcreg) != BLKmode
2116 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2117 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2119 /* If the structure doesn't take up a whole number of words, see whether
2120 SRCREG is padded on the left or on the right. If it's on the left,
2121 set PADDING_CORRECTION to the number of bits to skip.
2123 In most ABIs, the structure will be returned at the least significant end of
2124 the register, which translates to right padding on little-endian
2125 targets and left padding on big-endian targets. The opposite
2126 holds if the structure is returned at the most significant
2127 end of the register. */
2128 if (bytes % UNITS_PER_WORD != 0
2129 && (targetm.calls.return_in_msb (type)
2131 : BYTES_BIG_ENDIAN))
2133 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
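/* Illustrative sketch, not part of GCC: the PADDING_CORRECTION
   computation with concrete numbers.  The *_EX constants assume a
   hypothetical 32-bit, 4-bytes-per-word target.  */
#include <assert.h>

#define BITS_PER_WORD_EX 32
#define UNITS_PER_WORD_EX 4

static unsigned
padding_correction_ex (unsigned bytes)
{
  if (bytes % UNITS_PER_WORD_EX == 0)
    return 0;
  return BITS_PER_WORD_EX - (bytes % UNITS_PER_WORD_EX) * 8;
}

int
main (void)
{
  assert (padding_correction_ex (3) == 8);   /* skip one pad byte */
  assert (padding_correction_ex (4) == 0);   /* whole word, no pad */
  return 0;
}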
2135 /* Copy the structure BITSIZE bits at a time.
2137 We could probably emit more efficient code for machines which do not use
2138 strict alignment, but it doesn't seem worth the effort at the current
2140 for (bitpos = 0, xbitpos = padding_correction;
2141 bitpos < bytes * BITS_PER_UNIT;
2142 bitpos += bitsize, xbitpos += bitsize)
2144 /* We need a new source operand each time xbitpos is on a
2145 word boundary and when xbitpos == padding_correction
2146 (the first time through). */
2147 if (xbitpos % BITS_PER_WORD == 0
2148 || xbitpos == padding_correction)
2149 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2152 /* We need a new destination operand each time bitpos is on
2154 if (bitpos % BITS_PER_WORD == 0)
2155 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2157 /* Use xbitpos for the source extraction (right justified) and
2158 bitpos for the destination store (left justified). */
2159 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2160 extract_bit_field (src, bitsize,
2161 xbitpos % BITS_PER_WORD, 1,
2162 NULL_RTX, word_mode, word_mode,
2170 /* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
2174 use_reg (rtx *call_fusage, rtx reg)
2177 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2181 = gen_rtx_EXPR_LIST (VOIDmode,
2182 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2185 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2186 starting at REGNO. All of these registers must be hard registers. */
2189 use_regs (rtx *call_fusage, int regno, int nregs)
2193 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2196 for (i = 0; i < nregs; i++)
2197 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2200 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2201 PARALLEL REGS. This is for calls that pass values in multiple
2202 non-contiguous locations. The Irix 6 ABI has examples of this. */
2205 use_group_regs (rtx *call_fusage, rtx regs)
2209 for (i = 0; i < XVECLEN (regs, 0); i++)
2211 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2213 /* A NULL entry means the parameter goes both on the stack and in
2214 registers. This can also be a MEM for targets that pass values
2215 partially on the stack and partially in registers. */
2216 if (reg != 0 && REG_P (reg))
2217 use_reg (call_fusage, reg);
2222 /* Determine whether the LEN bytes generated by CONSTFUN can be
2223 stored to memory using several move instructions. CONSTFUNDATA is
2224 a pointer which will be passed as argument in every CONSTFUN call.
2225 ALIGN is maximum alignment we can assume. Return nonzero if a
2226 call to store_by_pieces should succeed. */
2229 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2230 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2231 void *constfundata, unsigned int align)
2233 unsigned HOST_WIDE_INT max_size, l;
2234 HOST_WIDE_INT offset = 0;
2235 enum machine_mode mode, tmode;
2236 enum insn_code icode;
2243 if (! STORE_BY_PIECES_P (len, align))
2246 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2247 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2248 align = MOVE_MAX * BITS_PER_UNIT;
2250 /* We would first store what we can in the largest integer mode, then go to
2251 successively smaller modes. */
2254 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2259 max_size = STORE_MAX_PIECES + 1;
2260 while (max_size > 1)
2262 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2263 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2264 if (GET_MODE_SIZE (tmode) < max_size)
2267 if (mode == VOIDmode)
2270 icode = mov_optab->handlers[(int) mode].insn_code;
2271 if (icode != CODE_FOR_nothing
2272 && align >= GET_MODE_ALIGNMENT (mode))
2274 unsigned int size = GET_MODE_SIZE (mode);
2281 cst = (*constfun) (constfundata, offset, mode);
2282 if (!LEGITIMATE_CONSTANT_P (cst))
2292 max_size = GET_MODE_SIZE (mode);
2295 /* The code above should have handled everything. */
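/* Illustrative sketch, not part of GCC: the widest-first covering that
   the mode loop above implements.  count_pieces is hypothetical; it
   counts the stores needed when LEN bytes are covered greedily by
   power-of-two pieces no larger than MAX_PIECE.  */
static unsigned
count_pieces (unsigned long len, unsigned max_piece)
{
  unsigned n = 0;
  for (unsigned size = max_piece; size > 0; size /= 2)
    while (len >= size)
      {
        len -= size;
        n++;
      }
  return n;
}

/* For example, count_pieces (13, 8) is 3: one 8-byte, one 4-byte and
   one 1-byte store.  */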
2303 /* Generate several move instructions to store LEN bytes generated by
2304 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2305 pointer which will be passed as argument in every CONSTFUN call.
2306 ALIGN is maximum alignment we can assume.
2307 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2308 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2312 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2313 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2314 void *constfundata, unsigned int align, int endp)
2316 struct store_by_pieces data;
2325 if (! STORE_BY_PIECES_P (len, align))
2327 to = protect_from_queue (to, 1);
2328 data.constfun = constfun;
2329 data.constfundata = constfundata;
2332 store_by_pieces_1 (&data, align);
2343 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2344 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2346 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2349 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2356 to1 = adjust_address (data.to, QImode, data.offset);
2364 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2365 rtx with BLKmode). The caller must pass TO through protect_from_queue
2366 before calling. ALIGN is maximum alignment we can assume. */
2369 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2371 struct store_by_pieces data;
2376 data.constfun = clear_by_pieces_1;
2377 data.constfundata = NULL;
2380 store_by_pieces_1 (&data, align);
2383 /* Callback routine for clear_by_pieces.
2384 Return const0_rtx unconditionally. */
2387 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2388 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2389 enum machine_mode mode ATTRIBUTE_UNUSED)
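/* Illustrative sketch, not part of GCC: the constfun callback pattern
   in plain C.  A generic driver asks the callback for the value at
   each offset, so clearing is just a callback that always answers
   zero.  All names here are hypothetical.  */
#include <stddef.h>

typedef unsigned char (*constfun_ex) (void *data, size_t offset);

static unsigned char
zero_byte (void *data, size_t offset)
{
  (void) data;
  (void) offset;
  return 0;
}

static void
fill_by_pieces_ex (unsigned char *to, size_t len, constfun_ex fn, void *data)
{
  for (size_t i = 0; i < len; i++)
    to[i] = fn (data, i);
}

/* fill_by_pieces_ex (buf, n, zero_byte, NULL) then plays the role of
   clear_by_pieces.  */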
2394 /* Subroutine of clear_by_pieces and store_by_pieces.
2395 Generate several move instructions to store LEN bytes of block TO. (A MEM
2396 rtx with BLKmode). The caller must pass TO through protect_from_queue
2397 before calling. ALIGN is maximum alignment we can assume. */
2400 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2401 unsigned int align ATTRIBUTE_UNUSED)
2403 rtx to_addr = XEXP (data->to, 0);
2404 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2405 enum machine_mode mode = VOIDmode, tmode;
2406 enum insn_code icode;
2409 data->to_addr = to_addr;
2411 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2412 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2414 data->explicit_inc_to = 0;
2416 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2418 data->offset = data->len;
2420 /* If storing requires more than two move insns,
2421 copy addresses to registers (to make displacements shorter)
2422 and use post-increment if available. */
2423 if (!data->autinc_to
2424 && move_by_pieces_ninsns (data->len, align) > 2)
2426 /* Determine the main mode we'll be using. */
2427 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2428 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2429 if (GET_MODE_SIZE (tmode) < max_size)
2432 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2434 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2435 data->autinc_to = 1;
2436 data->explicit_inc_to = -1;
2439 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2440 && ! data->autinc_to)
2442 data->to_addr = copy_addr_to_reg (to_addr);
2443 data->autinc_to = 1;
2444 data->explicit_inc_to = 1;
2447 if (!data->autinc_to && CONSTANT_P (to_addr))
2448 data->to_addr = copy_addr_to_reg (to_addr);
2451 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2452 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2453 align = MOVE_MAX * BITS_PER_UNIT;
2455 /* First store what we can in the largest integer mode, then go to
2456 successively smaller modes. */
2458 while (max_size > 1)
2460 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2461 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2462 if (GET_MODE_SIZE (tmode) < max_size)
2465 if (mode == VOIDmode)
2468 icode = mov_optab->handlers[(int) mode].insn_code;
2469 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2470 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2472 max_size = GET_MODE_SIZE (mode);
2475 /* The code above should have handled everything. */
2480 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2481 with move instructions for mode MODE. GENFUN is the gen_... function
2482 to make a move insn for that mode. DATA has all the other info. */
2485 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2486 struct store_by_pieces *data)
2488 unsigned int size = GET_MODE_SIZE (mode);
2491 while (data->len >= size)
2494 data->offset -= size;
2496 if (data->autinc_to)
2497 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2500 to1 = adjust_address (data->to, mode, data->offset);
2502 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2503 emit_insn (gen_add2_insn (data->to_addr,
2504 GEN_INT (-(HOST_WIDE_INT) size)));
2506 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2507 emit_insn ((*genfun) (to1, cst));
2509 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2510 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2512 if (! data->reverse)
2513 data->offset += size;
2519 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2520 its length in bytes. */
2523 clear_storage (rtx object, rtx size)
2526 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2527 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2529 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2530 just move a zero. Otherwise, do this a piece at a time. */
2531 if (GET_MODE (object) != BLKmode
2532 && GET_CODE (size) == CONST_INT
2533 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2534 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2537 object = protect_from_queue (object, 1);
2538 size = protect_from_queue (size, 0);
2540 if (size == const0_rtx)
2542 else if (GET_CODE (size) == CONST_INT
2543 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2544 clear_by_pieces (object, INTVAL (size), align);
2545 else if (clear_storage_via_clrmem (object, size, align))
2548 retval = clear_storage_via_libcall (object, size);
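/* Illustrative sketch, not part of GCC: the strategy ladder of
   clear_storage in miniature.  The threshold constant is a made-up
   stand-in for CLEAR_BY_PIECES_P.  */
#include <stddef.h>
#include <string.h>

#define CLEAR_BY_PIECES_MAX_EX 16

static void
clear_storage_ex (unsigned char *p, size_t n)
{
  if (n == 0)
    return;                          /* size == const0_rtx: nothing */
  if (n <= CLEAR_BY_PIECES_MAX_EX)
    for (size_t i = 0; i < n; i++)   /* inline "by pieces" stores */
      p[i] = 0;
  else
    memset (p, 0, n);                /* clrmem pattern or libcall */
}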
2554 /* A subroutine of clear_storage. Expand a clrmem pattern;
2555 return true if successful. */
2558 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2560 /* Try the most limited insn first, because there's no point
2561 including more than one in the machine description unless
2562 the more limited one has some advantage. */
2564 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2565 enum machine_mode mode;
2567 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2568 mode = GET_MODE_WIDER_MODE (mode))
2570 enum insn_code code = clrmem_optab[(int) mode];
2571 insn_operand_predicate_fn pred;
2573 if (code != CODE_FOR_nothing
2574 /* We don't need MODE to be narrower than
2575 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2576 the mode mask, as it is returned by the macro, it will
2577 definitely be less than the actual mode mask. */
2578 && ((GET_CODE (size) == CONST_INT
2579 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2580 <= (GET_MODE_MASK (mode) >> 1)))
2581 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2582 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2583 || (*pred) (object, BLKmode))
2584 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2585 || (*pred) (opalign, VOIDmode)))
2588 rtx last = get_last_insn ();
2591 op1 = convert_to_mode (mode, size, 1);
2592 pred = insn_data[(int) code].operand[1].predicate;
2593 if (pred != 0 && ! (*pred) (op1, mode))
2594 op1 = copy_to_mode_reg (mode, op1);
2596 pat = GEN_FCN ((int) code) (object, op1, opalign);
2603 delete_insns_since (last);
2610 /* A subroutine of clear_storage. Expand a call to memset.
2611 Return the return value of memset, 0 otherwise. */
2614 clear_storage_via_libcall (rtx object, rtx size)
2616 tree call_expr, arg_list, fn, object_tree, size_tree;
2617 enum machine_mode size_mode;
2620 /* OBJECT or SIZE may have been passed through protect_from_queue.
2622 It is unsafe to save the value generated by protect_from_queue
2623 and reuse it later. Consider what happens if emit_queue is
2624 called before the return value from protect_from_queue is used.
2626 Expansion of the CALL_EXPR below will call emit_queue before
2627 we are finished emitting RTL for argument setup. So if we are
2628 not careful we could get the wrong value for an argument.
2630 To avoid this problem we go ahead and emit code to copy OBJECT
2631 and SIZE into new pseudos.
2633 Note this is not strictly needed for library calls since they
2634 do not call emit_queue before loading their arguments. However,
2635 we may need to have library calls call emit_queue in the future
2636 since failing to do so could cause problems for targets which
2637 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2639 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2641 size_mode = TYPE_MODE (sizetype);
2642 size = convert_to_mode (size_mode, size, 1);
2643 size = copy_to_mode_reg (size_mode, size);
2645 /* It is incorrect to use the libcall calling conventions to call
2646 memset in this context. This could be a user call to memset and
2647 the user may wish to examine the return value from memset. For
2648 targets where libcalls and normal calls have different conventions
2649 for returning pointers, we could end up generating incorrect code. */
2651 object_tree = make_tree (ptr_type_node, object);
2652 size_tree = make_tree (sizetype, size);
2654 fn = clear_storage_libcall_fn (true);
2655 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2656 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2657 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2659 /* Now we have to build up the CALL_EXPR itself. */
2660 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2661 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2662 call_expr, arg_list, NULL_TREE);
2664 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2666 /* If we are initializing a readonly value, show the above call
2667 clobbered it. Otherwise, a load from it may erroneously be
2668 hoisted from a loop. */
2669 if (RTX_UNCHANGING_P (object))
2670 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2675 /* A subroutine of clear_storage_via_libcall. Create the tree node
2676 for the function we use for block clears. The first time FOR_CALL
2677 is true, we call assemble_external. */
2679 static GTY(()) tree block_clear_fn;
2682 init_block_clear_fn (const char *asmspec)
2684 if (!block_clear_fn)
2688 fn = get_identifier ("memset");
2689 args = build_function_type_list (ptr_type_node, ptr_type_node,
2690 integer_type_node, sizetype,
2693 fn = build_decl (FUNCTION_DECL, fn, args);
2694 DECL_EXTERNAL (fn) = 1;
2695 TREE_PUBLIC (fn) = 1;
2696 DECL_ARTIFICIAL (fn) = 1;
2697 TREE_NOTHROW (fn) = 1;
2699 block_clear_fn = fn;
2704 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2705 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2710 clear_storage_libcall_fn (int for_call)
2712 static bool emitted_extern;
2714 if (!block_clear_fn)
2715 init_block_clear_fn (NULL);
2717 if (for_call && !emitted_extern)
2719 emitted_extern = true;
2720 make_decl_rtl (block_clear_fn, NULL);
2721 assemble_external (block_clear_fn);
2724 return block_clear_fn;
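/* Illustrative sketch, not part of GCC: the lazy-initialization shape
   of the two functions above.  The decl is built on first use, and the
   one-time "emit the external" step happens only on the first call
   that actually needs it.  All names here are hypothetical.  */
static const char *block_clear_name_ex;   /* stands in for the decl */

static const char *
clear_storage_libcall_fn_ex (int for_call)
{
  static int emitted_extern_ex;

  if (!block_clear_name_ex)
    block_clear_name_ex = "memset";       /* init_block_clear_fn */
  if (for_call && !emitted_extern_ex)
    emitted_extern_ex = 1;                /* assemble_external once */
  return block_clear_name_ex;
}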
2727 /* Generate code to copy Y into X.
2728 Both Y and X must have the same mode, except that
2729 Y can be a constant with VOIDmode.
2730 This mode cannot be BLKmode; use emit_block_move for that.
2732 Return the last instruction emitted. */
2735 emit_move_insn (rtx x, rtx y)
2737 enum machine_mode mode = GET_MODE (x);
2738 rtx y_cst = NULL_RTX;
2741 x = protect_from_queue (x, 1);
2742 y = protect_from_queue (y, 0);
2744 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2750 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2751 && (last_insn = compress_float_constant (x, y)))
2756 if (!LEGITIMATE_CONSTANT_P (y))
2758 y = force_const_mem (mode, y);
2760 /* If the target's cannot_force_const_mem prevented the spill,
2761 assume that the target's move expanders will also take care
2762 of the non-legitimate constant. */
2768 /* If X or Y are memory references, verify that their addresses are valid
2771 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2772 && ! push_operand (x, GET_MODE (x)))
2774 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2775 x = validize_mem (x);
2778 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2780 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2781 y = validize_mem (y);
2783 if (mode == BLKmode)
2786 last_insn = emit_move_insn_1 (x, y);
2788 if (y_cst && REG_P (x)
2789 && (set = single_set (last_insn)) != NULL_RTX
2790 && SET_DEST (set) == x
2791 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2792 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
2797 /* Low level part of emit_move_insn.
2798 Called just like emit_move_insn, but assumes X and Y
2799 are basically valid. */
2802 emit_move_insn_1 (rtx x, rtx y)
2804 enum machine_mode mode = GET_MODE (x);
2805 enum machine_mode submode;
2806 enum mode_class class = GET_MODE_CLASS (mode);
2808 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2811 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2813 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2815 /* Expand complex moves by moving real part and imag part, if possible. */
2816 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2817 && BLKmode != (submode = GET_MODE_INNER (mode))
2818 && (mov_optab->handlers[(int) submode].insn_code
2819 != CODE_FOR_nothing))
2821 /* Don't split destination if it is a stack push. */
2822 int stack = push_operand (x, GET_MODE (x));
2824 #ifdef PUSH_ROUNDING
2825 /* In case we output to the stack, but the size is smaller than the
2826 machine can push exactly, we need to use move instructions. */
2828 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2829 != GET_MODE_SIZE (submode)))
2832 HOST_WIDE_INT offset1, offset2;
2834 /* Do not use anti_adjust_stack, since we don't want to update
2835 stack_pointer_delta. */
2836 temp = expand_binop (Pmode,
2837 #ifdef STACK_GROWS_DOWNWARD
2845 (GET_MODE_SIZE (GET_MODE (x)))),
2846 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2848 if (temp != stack_pointer_rtx)
2849 emit_move_insn (stack_pointer_rtx, temp);
2851 #ifdef STACK_GROWS_DOWNWARD
2853 offset2 = GET_MODE_SIZE (submode);
2855 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2856 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2857 + GET_MODE_SIZE (submode));
2860 emit_move_insn (change_address (x, submode,
2861 gen_rtx_PLUS (Pmode,
2863 GEN_INT (offset1))),
2864 gen_realpart (submode, y));
2865 emit_move_insn (change_address (x, submode,
2866 gen_rtx_PLUS (Pmode,
2868 GEN_INT (offset2))),
2869 gen_imagpart (submode, y));
2873 /* If this is a stack, push the highpart first, so it
2874 will be in the argument order.
2876 In that case, change_address is used only to convert
2877 the mode, not to change the address. */
2880 /* Note that the real part always precedes the imag part in memory
2881 regardless of machine's endianness. */
2882 #ifdef STACK_GROWS_DOWNWARD
2883 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2884 gen_imagpart (submode, y));
2885 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2886 gen_realpart (submode, y));
2888 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2889 gen_realpart (submode, y));
2890 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2891 gen_imagpart (submode, y));
2896 rtx realpart_x, realpart_y;
2897 rtx imagpart_x, imagpart_y;
2899 /* If this is a complex value with each part being smaller than a
2900 word, the usual calling sequence will likely pack the pieces into
2901 a single register. Unfortunately, SUBREG of hard registers only
2902 deals in terms of words, so we have a problem converting input
2903 arguments to the CONCAT of two registers that is used elsewhere
2904 for complex values. If this is before reload, we can copy it into
2905 memory and reload. FIXME, we should see about using extract and
2906 insert on integer registers, but complex short and complex char
2907 variables should be rarely used. */
2908 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2909 && (reload_in_progress | reload_completed) == 0)
2912 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2914 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2916 if (packed_dest_p || packed_src_p)
2918 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2919 ? MODE_FLOAT : MODE_INT);
2921 enum machine_mode reg_mode
2922 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2924 if (reg_mode != BLKmode)
2926 rtx mem = assign_stack_temp (reg_mode,
2927 GET_MODE_SIZE (mode), 0);
2928 rtx cmem = adjust_address (mem, mode, 0);
2932 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2934 emit_move_insn_1 (cmem, y);
2935 return emit_move_insn_1 (sreg, mem);
2939 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2941 emit_move_insn_1 (mem, sreg);
2942 return emit_move_insn_1 (x, cmem);
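/* Illustrative sketch, not part of GCC: the bounce through a stack
   temporary as plain C.  When two views of the same bits cannot be
   converted directly, store under one type and load under the other;
   memcpy is the portable analogue of the MEM/CMEM pair above.  The
   sketch assumes 32-bit floats and a 64-bit integer.  */
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float parts[2] = { 1.0f, 2.0f };   /* complex value: real, imag */
  uint64_t packed;                   /* packed word-sized view */

  assert (sizeof parts == sizeof packed);
  memcpy (&packed, parts, sizeof packed);   /* store in one mode */
  float back[2];
  memcpy (back, &packed, sizeof back);      /* load in the other */
  assert (back[0] == 1.0f && back[1] == 2.0f);
  return 0;
}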
2948 realpart_x = gen_realpart (submode, x);
2949 realpart_y = gen_realpart (submode, y);
2950 imagpart_x = gen_imagpart (submode, x);
2951 imagpart_y = gen_imagpart (submode, y);
2953 /* Show the output dies here. This is necessary for SUBREGs
2954 of pseudos since we cannot track their lifetimes correctly;
2955 hard regs shouldn't appear here except as return values.
2956 We never want to emit such a clobber after reload. */
2958 && ! (reload_in_progress || reload_completed)
2959 && (GET_CODE (realpart_x) == SUBREG
2960 || GET_CODE (imagpart_x) == SUBREG))
2961 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2963 emit_move_insn (realpart_x, realpart_y);
2964 emit_move_insn (imagpart_x, imagpart_y);
2967 return get_last_insn ();
2970 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2971 find a mode to do it in. If we have a movcc, use it. Otherwise,
2972 find the MODE_INT mode of the same width. */
2973 else if (GET_MODE_CLASS (mode) == MODE_CC
2974 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2976 enum insn_code insn_code;
2977 enum machine_mode tmode = VOIDmode;
2981 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2984 for (tmode = QImode; tmode != VOIDmode;
2985 tmode = GET_MODE_WIDER_MODE (tmode))
2986 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2989 if (tmode == VOIDmode)
2992 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2993 may call change_address which is not appropriate if we were
2994 called when a reload was in progress. We don't have to worry
2995 about changing the address since the size in bytes is supposed to
2996 be the same. Copy the MEM to change the mode and move any
2997 substitutions from the old MEM to the new one. */
2999 if (reload_in_progress)
3001 x = gen_lowpart_common (tmode, x1);
3002 if (x == 0 && MEM_P (x1))
3004 x = adjust_address_nv (x1, tmode, 0);
3005 copy_replacements (x1, x);
3008 y = gen_lowpart_common (tmode, y1);
3009 if (y == 0 && MEM_P (y1))
3011 y = adjust_address_nv (y1, tmode, 0);
3012 copy_replacements (y1, y);
3017 x = gen_lowpart (tmode, x);
3018 y = gen_lowpart (tmode, y);
3021 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3022 return emit_insn (GEN_FCN (insn_code) (x, y));
3025 /* Try using a move pattern for the corresponding integer mode. This is
3026 only safe when simplify_subreg can convert MODE constants into integer
3027 constants. At present, it can only do this reliably if the value
3028 fits within a HOST_WIDE_INT. */
3029 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3030 && (submode = int_mode_for_mode (mode)) != BLKmode
3031 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3032 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3033 (simplify_gen_subreg (submode, x, mode, 0),
3034 simplify_gen_subreg (submode, y, mode, 0)));
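/* Illustrative sketch, not part of GCC: a scalar float moved through
   the integer mode of the same width, the C-level analogue of the
   simplify_gen_subreg pair above.  move_float_as_int is hypothetical
   and assumes 32-bit floats.  */
#include <assert.h>
#include <stdint.h>
#include <string.h>

static void
move_float_as_int (float *dst, const float *src)
{
  uint32_t bits;                    /* SImode stand-in for SFmode */
  memcpy (&bits, src, sizeof bits); /* subreg view of the source */
  memcpy (dst, &bits, sizeof bits); /* integer move to the dest */
}

int
main (void)
{
  float x, y = 3.5f;
  move_float_as_int (&x, &y);
  assert (x == 3.5f);
  return 0;
}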
3036 /* This will handle any multi-word or full-word mode that lacks a move_insn
3037 pattern. However, you will get better code if you define such patterns,
3038 even if they must turn into multiple assembler instructions. */
3039 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3046 #ifdef PUSH_ROUNDING
3048 /* If X is a push on the stack, do the push now and replace
3049 X with a reference to the stack pointer. */
3050 if (push_operand (x, GET_MODE (x)))
3055 /* Do not use anti_adjust_stack, since we don't want to update
3056 stack_pointer_delta. */
3057 temp = expand_binop (Pmode,
3058 #ifdef STACK_GROWS_DOWNWARD
3066 (GET_MODE_SIZE (GET_MODE (x)))),
3067 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3069 if (temp != stack_pointer_rtx)
3070 emit_move_insn (stack_pointer_rtx, temp);
3072 code = GET_CODE (XEXP (x, 0));
3074 /* Just hope that small offsets off SP are OK. */
3075 if (code == POST_INC)
3076 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3077 GEN_INT (-((HOST_WIDE_INT)
3078 GET_MODE_SIZE (GET_MODE (x)))));
3079 else if (code == POST_DEC)
3080 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3081 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3083 temp = stack_pointer_rtx;
3085 x = change_address (x, VOIDmode, temp);
3089 /* If we are in reload, see if either operand is a MEM whose address
3090 is scheduled for replacement. */
3091 if (reload_in_progress && MEM_P (x)
3092 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3093 x = replace_equiv_address_nv (x, inner);
3094 if (reload_in_progress && MEM_P (y)
3095 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3096 y = replace_equiv_address_nv (y, inner);
3102 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3105 rtx xpart = operand_subword (x, i, 1, mode);
3106 rtx ypart = operand_subword (y, i, 1, mode);
3108 /* If we can't get a part of Y, put Y into memory if it is a
3109 constant. Otherwise, force it into a register. If we still
3110 can't get a part of Y, abort. */
3111 if (ypart == 0 && CONSTANT_P (y))
3113 y = force_const_mem (mode, y);
3114 ypart = operand_subword (y, i, 1, mode);
3116 else if (ypart == 0)
3117 ypart = operand_subword_force (y, i, mode);
3119 if (xpart == 0 || ypart == 0)
3122 need_clobber |= (GET_CODE (xpart) == SUBREG);
3124 last_insn = emit_move_insn (xpart, ypart);
3130 /* Show the output dies here. This is necessary for SUBREGs
3131 of pseudos since we cannot track their lifetimes correctly;
3132 hard regs shouldn't appear here except as return values.
3133 We never want to emit such a clobber after reload. */
3135 && ! (reload_in_progress || reload_completed)
3136 && need_clobber != 0)
3137 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
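/* Illustrative sketch, not part of GCC: the word-by-word fallback as
   plain C.  A value wider than a machine word is moved one word at a
   time, with the last, possibly partial word handled separately.
   move_multiword_ex is hypothetical.  */
#include <stddef.h>
#include <string.h>

static void
move_multiword_ex (void *x, const void *y, size_t size)
{
  const size_t w = sizeof (unsigned long);   /* UNITS_PER_WORD */
  size_t nwords = (size + w - 1) / w;

  for (size_t i = 0; i < nwords; i++)
    {
      size_t n = size - i * w;
      if (n > w)
        n = w;                                /* trailing partial word */
      memcpy ((char *) x + i * w, (const char *) y + i * w, n);
    }
}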
3147 /* If Y is representable exactly in a narrower mode, and the target can
3148 perform the extension directly from constant or memory, then emit the
3149 move as an extension. */
3152 compress_float_constant (rtx x, rtx y)
3154 enum machine_mode dstmode = GET_MODE (x);
3155 enum machine_mode orig_srcmode = GET_MODE (y);
3156 enum machine_mode srcmode;
3159 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3161 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3162 srcmode != orig_srcmode;
3163 srcmode = GET_MODE_WIDER_MODE (srcmode))
3166 rtx trunc_y, last_insn;
3168 /* Skip if the target can't extend this way. */
3169 ic = can_extend_p (dstmode, srcmode, 0);
3170 if (ic == CODE_FOR_nothing)
3173 /* Skip if the narrowed value isn't exact. */
3174 if (! exact_real_truncate (srcmode, &r))
3177 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3179 if (LEGITIMATE_CONSTANT_P (trunc_y))
3181 /* Skip if the target needs extra instructions to perform
3183 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3186 else if (float_extend_from_mem[dstmode][srcmode])
3187 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3191 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3192 last_insn = get_last_insn ();
3195 set_unique_reg_note (last_insn, REG_EQUAL, y);
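/* Illustrative sketch, not part of GCC: the exactness test at the
   heart of compress_float_constant, as plain C.  A double constant may
   be emitted as a float-extend only when truncation loses nothing,
   which a round trip detects.  */
#include <assert.h>

static int
exact_in_float_ex (double d)
{
  return (double) (float) d == d;
}

int
main (void)
{
  assert (exact_in_float_ex (0.5));    /* exact in SFmode */
  assert (!exact_in_float_ex (0.1));   /* rounds when truncated */
  return 0;
}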
3203 /* Pushing data onto the stack. */
3205 /* Push a block of length SIZE (perhaps variable)
3206 and return an rtx to address the beginning of the block.
3207 Note that it is not possible for the value returned to be a QUEUED.
3208 The value may be virtual_outgoing_args_rtx.
3210 EXTRA is the number of bytes of padding to push in addition to SIZE.
3211 BELOW nonzero means this padding comes at low addresses;
3212 otherwise, the padding comes at high addresses. */
3215 push_block (rtx size, int extra, int below)
3219 size = convert_modes (Pmode, ptr_mode, size, 1);
3220 if (CONSTANT_P (size))
3221 anti_adjust_stack (plus_constant (size, extra));
3222 else if (REG_P (size) && extra == 0)
3223 anti_adjust_stack (size);
3226 temp = copy_to_mode_reg (Pmode, size);
3228 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3229 temp, 0, OPTAB_LIB_WIDEN);
3230 anti_adjust_stack (temp);
3233 #ifndef STACK_GROWS_DOWNWARD
3239 temp = virtual_outgoing_args_rtx;
3240 if (extra != 0 && below)
3241 temp = plus_constant (temp, extra);
3245 if (GET_CODE (size) == CONST_INT)
3246 temp = plus_constant (virtual_outgoing_args_rtx,
3247 -INTVAL (size) - (below ? 0 : extra));
3248 else if (extra != 0 && !below)
3249 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3250 negate_rtx (Pmode, plus_constant (size, extra)));
3252 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3253 negate_rtx (Pmode, size));
3256 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
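/* Illustrative sketch, not part of GCC: the address arithmetic of
   push_block for a downward-growing stack in the constant-size case.
   The block starts SIZE bytes below the old outgoing-args pointer,
   plus EXTRA more when the padding sits above the data.
   push_block_addr_ex is hypothetical.  */
static char *
push_block_addr_ex (char *outgoing_args, long size, long extra, int below)
{
  return outgoing_args - size - (below ? 0 : extra);
}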
3259 #ifdef PUSH_ROUNDING
3261 /* Emit single push insn. */
3264 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3267 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3269 enum insn_code icode;
3270 insn_operand_predicate_fn pred;
3272 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3273 /* If there is a push pattern, use it. Otherwise try the old way of
3274 throwing a MEM representing the push operation to the move expander. */
3275 icode = push_optab->handlers[(int) mode].insn_code;
3276 if (icode != CODE_FOR_nothing)
3278 if (((pred = insn_data[(int) icode].operand[0].predicate)
3279 && !((*pred) (x, mode))))
3280 x = force_reg (mode, x);
3281 emit_insn (GEN_FCN (icode) (x));
3284 if (GET_MODE_SIZE (mode) == rounded_size)
3285 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3286 /* If we are to pad downward, adjust the stack pointer first and
3287 then store X into the stack location using an offset. This is
3288 because emit_move_insn does not know how to pad; it does not have
3290 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3292 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3293 HOST_WIDE_INT offset;
3295 emit_move_insn (stack_pointer_rtx,
3296 expand_binop (Pmode,
3297 #ifdef STACK_GROWS_DOWNWARD
3303 GEN_INT (rounded_size),
3304 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3306 offset = (HOST_WIDE_INT) padding_size;
3307 #ifdef STACK_GROWS_DOWNWARD
3308 if (STACK_PUSH_CODE == POST_DEC)
3309 /* We have already decremented the stack pointer, so get the
3311 offset += (HOST_WIDE_INT) rounded_size;
3313 if (STACK_PUSH_CODE == POST_INC)
3314 /* We have already incremented the stack pointer, so get the
3316 offset -= (HOST_WIDE_INT) rounded_size;
3318 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3322 #ifdef STACK_GROWS_DOWNWARD
3323 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3324 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3325 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3327 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3328 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3329 GEN_INT (rounded_size));
3331 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3334 dest = gen_rtx_MEM (mode, dest_addr);
3338 set_mem_attributes (dest, type, 1);
3340 if (flag_optimize_sibling_calls)
3341 /* Function incoming arguments may overlap with sibling call
3342 outgoing arguments and we cannot allow reordering of reads
3343 from function arguments with stores to outgoing arguments
3344 of sibling calls. */
3345 set_mem_alias_set (dest, 0);
3347 emit_move_insn (dest, x);
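/* Illustrative sketch, not part of GCC: the pad-downward offset
   computation above with concrete numbers.  The 16-byte rounding is a
   made-up PUSH_ROUNDING; the post_dec flag models the
   STACK_PUSH_CODE == POST_DEC correction.  */
#include <assert.h>

static long
pushed_data_offset_ex (unsigned size, int post_dec)
{
  unsigned rounded = (size + 15u) & ~15u;  /* hypothetical rounding */
  long offset = rounded - size;            /* padding sits below */
  if (post_dec)
    offset += rounded;   /* sp already moved past the slot */
  return offset;
}

int
main (void)
{
  assert (pushed_data_offset_ex (12, 0) == 4);
  assert (pushed_data_offset_ex (12, 1) == 20);
  return 0;
}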
3351 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3353 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3355 SIZE is an rtx for the size of data to be copied (in bytes),
3356 needed only if X is BLKmode.
3358 ALIGN (in bits) is maximum alignment we can assume.
3360 If PARTIAL and REG are both nonzero, then copy that many of the first
3361 words of X into registers starting with REG, and push the rest of X.
3362 The amount of space pushed is decreased by PARTIAL words,
3363 rounded *down* to a multiple of PARM_BOUNDARY.
3364 REG must be a hard register in this case.
3365 If REG is zero but PARTIAL is not, take all other actions for an
3366 argument partially in registers, but do not actually load any registers.
3369 EXTRA is the amount in bytes of extra space to leave next to this arg.
3370 This is ignored if an argument block has already been allocated.
3372 On a machine that lacks real push insns, ARGS_ADDR is the address of
3373 the bottom of the argument block for this call. We use indexing off there
3374 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3375 argument block has not been preallocated.
3377 ARGS_SO_FAR is the size of args previously pushed for this call.
3379 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3380 for arguments passed in registers. If nonzero, it will be the number
3381 of bytes required. */
3384 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3385 unsigned int align, int partial, rtx reg, int extra,
3386 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3390 enum direction stack_direction
3391 #ifdef STACK_GROWS_DOWNWARD
3397 /* Decide where to pad the argument: `downward' for below,
3398 `upward' for above, or `none' for don't pad it.
3399 Default is below for small data on big-endian machines; else above. */
3400 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3402 /* Invert direction if stack is post-decrement.
3404 if (STACK_PUSH_CODE == POST_DEC)
3405 if (where_pad != none)
3406 where_pad = (where_pad == downward ? upward : downward);
3408 xinner = x = protect_from_queue (x, 0);
3410 if (mode == BLKmode)
3412 /* Copy a block into the stack, entirely or partially. */
3415 int used = partial * UNITS_PER_WORD;
3419 if (reg && GET_CODE (reg) == PARALLEL)
3421 /* Use the size of the elt to compute offset. */
3422 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3423 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3424 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3427 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3434 /* USED is now the # of bytes we need not copy to the stack
3435 because registers will take care of them. */
3438 xinner = adjust_address (xinner, BLKmode, used);
3440 /* If the partial register-part of the arg counts in its stack size,
3441 skip the part of stack space corresponding to the registers.
3442 Otherwise, start copying to the beginning of the stack space,
3443 by setting SKIP to 0. */
3444 skip = (reg_parm_stack_space == 0) ? 0 : used;
3446 #ifdef PUSH_ROUNDING
3447 /* Do it with several push insns if that doesn't take lots of insns
3448 and if there is no difficulty with push insns that skip bytes
3449 on the stack for alignment purposes. */
3452 && GET_CODE (size) == CONST_INT
3454 && MEM_ALIGN (xinner) >= align
3455 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3456 /* Here we avoid the case of a structure whose weak alignment
3457 forces many pushes of a small amount of data,
3458 and such small pushes do rounding that causes trouble. */
3459 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3460 || align >= BIGGEST_ALIGNMENT
3461 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3462 == (align / BITS_PER_UNIT)))
3463 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3465 /* Push padding now if padding above and stack grows down,
3466 or if padding below and stack grows up.
3467 But if space already allocated, this has already been done. */
3468 if (extra && args_addr == 0
3469 && where_pad != none && where_pad != stack_direction)
3470 anti_adjust_stack (GEN_INT (extra));
3472 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3475 #endif /* PUSH_ROUNDING */
3479 /* Otherwise make space on the stack and copy the data
3480 to the address of that space. */
3482 /* Deduct words put into registers from the size we must copy. */
3485 if (GET_CODE (size) == CONST_INT)
3486 size = GEN_INT (INTVAL (size) - used);
3488 size = expand_binop (GET_MODE (size), sub_optab, size,
3489 GEN_INT (used), NULL_RTX, 0,
3493 /* Get the address of the stack space.
3494 In this case, we do not deal with EXTRA separately.
3495 A single stack adjust will do. */
3498 temp = push_block (size, extra, where_pad == downward);
3501 else if (GET_CODE (args_so_far) == CONST_INT)
3502 temp = memory_address (BLKmode,
3503 plus_constant (args_addr,
3504 skip + INTVAL (args_so_far)));
3506 temp = memory_address (BLKmode,
3507 plus_constant (gen_rtx_PLUS (Pmode,
3512 if (!ACCUMULATE_OUTGOING_ARGS)
3514 /* If the source is referenced relative to the stack pointer,
3515 copy it to another register to stabilize it. We do not need
3516 to do this if we know that we won't be changing sp. */
3518 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3519 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3520 temp = copy_to_reg (temp);
3523 target = gen_rtx_MEM (BLKmode, temp);
3527 set_mem_attributes (target, type, 1);
3528 /* Function incoming arguments may overlap with sibling call
3529 outgoing arguments and we cannot allow reordering of reads
3530 from function arguments with stores to outgoing arguments
3531 of sibling calls. */
3532 set_mem_alias_set (target, 0);
3535 /* ALIGN may well be better aligned than TYPE, e.g. due to
3536 PARM_BOUNDARY. Assume the caller isn't lying. */
3537 set_mem_align (target, align);
3539 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3542 else if (partial > 0)
3544 /* Scalar partly in registers. */
3546 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3549 /* # words of start of argument
3550 that we must make space for but need not store. */
3551 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3552 int args_offset = INTVAL (args_so_far);
3555 /* Push padding now if padding above and stack grows down,
3556 or if padding below and stack grows up.
3557 But if space already allocated, this has already been done. */
3558 if (extra && args_addr == 0
3559 && where_pad != none && where_pad != stack_direction)
3560 anti_adjust_stack (GEN_INT (extra));
3562 /* If we make space by pushing it, we might as well push
3563 the real data. Otherwise, we can leave OFFSET nonzero
3564 and leave the space uninitialized. */
3568 /* Now NOT_STACK gets the number of words that we don't need to
3569 allocate on the stack. */
3570 not_stack = partial - offset;
3572 /* If the partial register-part of the arg counts in its stack size,
3573 skip the part of stack space corresponding to the registers.
3574 Otherwise, start copying to the beginning of the stack space,
3575 by setting SKIP to 0. */
3576 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3578 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3579 x = validize_mem (force_const_mem (mode, x));
3581 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3582 SUBREGs of such registers are not allowed. */
3583 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3584 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3585 x = copy_to_reg (x);
3587 /* Loop over all the words allocated on the stack for this arg. */
3588 /* We can do it by words, because any scalar bigger than a word
3589 has a size that is a multiple of a word. */
3590 #ifndef PUSH_ARGS_REVERSED
3591 for (i = not_stack; i < size; i++)
3593 for (i = size - 1; i >= not_stack; i--)
3595 if (i >= not_stack + offset)
3596 emit_push_insn (operand_subword_force (x, i, mode),
3597 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3599 GEN_INT (args_offset + ((i - not_stack + skip)
3601 reg_parm_stack_space, alignment_pad);
3608 /* Push padding now if padding above and stack grows down,
3609 or if padding below and stack grows up.
3610 But if space already allocated, this has already been done. */
3611 if (extra && args_addr == 0
3612 && where_pad != none && where_pad != stack_direction)
3613 anti_adjust_stack (GEN_INT (extra));
3615 #ifdef PUSH_ROUNDING
3616 if (args_addr == 0 && PUSH_ARGS)
3617 emit_single_push_insn (mode, x, type);
3621 if (GET_CODE (args_so_far) == CONST_INT)
3623 = memory_address (mode,
3624 plus_constant (args_addr,
3625 INTVAL (args_so_far)));
3627 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3629 dest = gen_rtx_MEM (mode, addr);
3632 set_mem_attributes (dest, type, 1);
3633 /* Function incoming arguments may overlap with sibling call
3634 outgoing arguments and we cannot allow reordering of reads
3635 from function arguments with stores to outgoing arguments
3636 of sibling calls. */
3637 set_mem_alias_set (dest, 0);
3640 emit_move_insn (dest, x);
3644 /* If part should go in registers, copy that part
3645 into the appropriate registers. Do this now, at the end,
3646 since mem-to-mem copies above may do function calls. */
3647 if (partial > 0 && reg != 0)
3649 /* Handle calls that pass values in multiple non-contiguous locations.
3650 The Irix 6 ABI has examples of this. */
3651 if (GET_CODE (reg) == PARALLEL)
3652 emit_group_load (reg, x, type, -1);
3654 move_block_to_reg (REGNO (reg), x, partial, mode);
3657 if (extra && args_addr == 0 && where_pad == stack_direction)
3658 anti_adjust_stack (GEN_INT (extra));
3660 if (alignment_pad && args_addr == 0)
3661 anti_adjust_stack (alignment_pad);
3664 /* Return X if X can be used as a subtarget in a sequence of arithmetic
3668 get_subtarget (rtx x)
3671 /* Only registers can be subtargets. */
3673 /* If the register is readonly, it can't be set more than once. */
3674 || RTX_UNCHANGING_P (x)
3675 /* Don't use hard regs to avoid extending their life. */
3676 || REGNO (x) < FIRST_PSEUDO_REGISTER
3677 /* Avoid subtargets inside loops,
3678 since they hide some invariant expressions. */
3679 || preserve_subexpressions_p ())
3683 /* Expand an assignment that stores the value of FROM into TO.
3684 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3685 (This may contain a QUEUED rtx;
3686 if the value is constant, this rtx is a constant.)
3687 Otherwise, the returned value is NULL_RTX. */
3690 expand_assignment (tree to, tree from, int want_value)
3695 /* Don't crash if the lhs of the assignment was erroneous. */
3697 if (TREE_CODE (to) == ERROR_MARK)
3699 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3700 return want_value ? result : NULL_RTX;
3703 /* Assignment of a structure component needs special treatment
3704 if the structure component's rtx is not simply a MEM.
3705 Assignment of an array element at a constant index, and assignment of
3706 an array element in an unaligned packed structure field, has the same
3709 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3710 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3711 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3713 enum machine_mode mode1;
3714 HOST_WIDE_INT bitsize, bitpos;
3722 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3723 &unsignedp, &volatilep);
3725 /* If we are going to use store_bit_field and extract_bit_field,
3726 make sure to_rtx will be safe for multiple use. */
3728 if (mode1 == VOIDmode && want_value)
3729 tem = stabilize_reference (tem);
3731 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3735 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3737 if (!MEM_P (to_rtx))
3740 #ifdef POINTERS_EXTEND_UNSIGNED
3741 if (GET_MODE (offset_rtx) != Pmode)
3742 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3744 if (GET_MODE (offset_rtx) != ptr_mode)
3745 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3748 /* A constant address in TO_RTX can have VOIDmode, we must not try
3749 to call force_reg for that case. Avoid that case. */
3751 && GET_MODE (to_rtx) == BLKmode
3752 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3754 && (bitpos % bitsize) == 0
3755 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3756 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3758 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3762 to_rtx = offset_address (to_rtx, offset_rtx,
3763 highest_pow2_factor_for_target (to,
3769 /* If the field is at offset zero, we could have been given the
3770 DECL_RTX of the parent struct. Don't munge it. */
3771 to_rtx = shallow_copy_rtx (to_rtx);
3773 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3776 /* Deal with volatile and readonly fields. The former is only done
3777 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3778 if (volatilep && MEM_P (to_rtx))
3780 if (to_rtx == orig_to_rtx)
3781 to_rtx = copy_rtx (to_rtx);
3782 MEM_VOLATILE_P (to_rtx) = 1;
3785 if (TREE_CODE (to) == COMPONENT_REF
3786 && TREE_READONLY (TREE_OPERAND (to, 1))
3787 /* We can't assert that a MEM won't be set more than once
3788 if the component is not addressable because another
3789 non-addressable component may be referenced by the same MEM. */
3790 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3792 if (to_rtx == orig_to_rtx)
3793 to_rtx = copy_rtx (to_rtx);
3794 RTX_UNCHANGING_P (to_rtx) = 1;
3797 if (MEM_P (to_rtx) && ! can_address_p (to))
3799 if (to_rtx == orig_to_rtx)
3800 to_rtx = copy_rtx (to_rtx);
3801 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3804 /* Disabled temporarily. GET_MODE (to_rtx) is often not the right
3806 while (0 && mode1 == VOIDmode && !want_value
3807 && bitpos + bitsize <= BITS_PER_WORD
3808 && bitsize < BITS_PER_WORD
3809 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3810 && !TREE_SIDE_EFFECTS (to)
3811 && !TREE_THIS_VOLATILE (to))
3815 HOST_WIDE_INT count = bitpos;
3820 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3821 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3824 op0 = TREE_OPERAND (src, 0);
3825 op1 = TREE_OPERAND (src, 1);
3828 if (! operand_equal_p (to, op0, 0))
3831 if (BYTES_BIG_ENDIAN)
3832 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3834 /* Special case some bitfield op= exp. */
3835 switch (TREE_CODE (src))
3842 /* For now, just optimize the case of the topmost bitfield
3843 where we don't need to do any masking and also
3844 1 bit bitfields where xor can be used.
3845 We might win by one instruction for the other bitfields
3846 too if insv/extv instructions aren't used, so that
3847 can be added later. */
3848 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
3849 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3851 value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
3852 value = protect_from_queue (value, 0);
3853 to_rtx = protect_from_queue (to_rtx, 1);
3854 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3856 && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3858 value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
3862 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3863 value, build_int_2 (count, 0),
3865 result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
3866 value, to_rtx, 1, OPTAB_WIDEN);
3867 if (result != to_rtx)
3868 emit_move_insn (to_rtx, result);
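/* Illustrative sketch, not part of GCC: why the topmost bitfield needs
   no masking.  Bits that carry past the top of the word are simply
   discarded, so a shifted add updates the field without touching the
   rest of the word.  The field layout here is hypothetical.  */
#include <assert.h>
#include <stdint.h>

int
main (void)
{
  /* A 4-bit field in bits 28..31 of a 32-bit word.  */
  uint32_t word = 0x50000001u;   /* field = 5, low bit set */
  int count = 28;

  word += (uint32_t) 3 << count; /* field += 3, no mask needed */

  assert (word >> 28 == 8);            /* field updated */
  assert ((word & 0x0FFFFFFFu) == 1);  /* low bits untouched */
  return 0;
}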
3879 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3881 /* Spurious cast for HPUX compiler. */
3882 ? ((enum machine_mode)
3883 TYPE_MODE (TREE_TYPE (to)))
3885 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3887 preserve_temp_slots (result);
3891 /* If the value is meaningful, convert RESULT to the proper mode.
3892 Otherwise, return nothing. */
3893 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3894 TYPE_MODE (TREE_TYPE (from)),
3896 TYPE_UNSIGNED (TREE_TYPE (to)))
3900 /* If the rhs is a function call and its value is not an aggregate,
3901 call the function before we start to compute the lhs.
3902 This is needed for correct code for cases such as
3903 val = setjmp (buf) on machines where reference to val
3904 requires loading up part of an address in a separate insn.
3906 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3907 since it might be a promoted variable where the zero- or sign- extension
3908 needs to be done. Handling this in the normal way is safe because no
3909 computation is done before the call. */
3910 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3911 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3912 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3913 && REG_P (DECL_RTL (to))))
3918 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3920 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3922 /* Handle calls that return values in multiple non-contiguous locations.
3923 The Irix 6 ABI has examples of this. */
3924 if (GET_CODE (to_rtx) == PARALLEL)
3925 emit_group_load (to_rtx, value, TREE_TYPE (from),
3926 int_size_in_bytes (TREE_TYPE (from)));
3927 else if (GET_MODE (to_rtx) == BLKmode)
3928 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3931 if (POINTER_TYPE_P (TREE_TYPE (to)))
3932 value = convert_memory_address (GET_MODE (to_rtx), value);
3933 emit_move_insn (to_rtx, value);
3935 preserve_temp_slots (to_rtx);
3938 return want_value ? to_rtx : NULL_RTX;
3941 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3942 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3945 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3947 /* Don't move directly into a return register. */
3948 if (TREE_CODE (to) == RESULT_DECL
3949 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3954 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3956 if (GET_CODE (to_rtx) == PARALLEL)
3957 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3958 int_size_in_bytes (TREE_TYPE (from)));
3960 emit_move_insn (to_rtx, temp);
3962 preserve_temp_slots (to_rtx);
3965 return want_value ? to_rtx : NULL_RTX;
3968 /* In case we are returning the contents of an object which overlaps
3969 the place the value is being stored, use a safe function when copying
3970 a value through a pointer into a structure value return block. */
3971 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3972 && current_function_returns_struct
3973 && !current_function_returns_pcc_struct)
3978 size = expr_size (from);
3979 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3981 emit_library_call (memmove_libfunc, LCT_NORMAL,
3982 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3983 XEXP (from_rtx, 0), Pmode,
3984 convert_to_mode (TYPE_MODE (sizetype),
3985 size, TYPE_UNSIGNED (sizetype)),
3986 TYPE_MODE (sizetype));
3988 preserve_temp_slots (to_rtx);
3991 return want_value ? to_rtx : NULL_RTX;
3994 /* Compute FROM and store the value in the rtx we got. */
3997 result = store_expr (from, to_rtx, want_value);
3998 preserve_temp_slots (result);
4001 return want_value ? result : NULL_RTX;
4004 /* Generate code for computing expression EXP,
4005 and storing the value into TARGET.
4006 TARGET may contain a QUEUED rtx.
4008 If WANT_VALUE & 1 is nonzero, return a copy of the value
4009 not in TARGET, so that we can be sure to use the proper
4010 value in a containing expression even if TARGET has something
4011 else stored in it. If possible, we copy the value through a pseudo
4012 and return that pseudo. Or, if the value is constant, we try to
4013 return the constant. In some cases, we return a pseudo
4014 copied *from* TARGET.
4016 If the mode is BLKmode then we may return TARGET itself.
4017 It turns out that in BLKmode it doesn't cause a problem,
4018 because C has no operators that could combine two different
4019 assignments into the same BLKmode object with different values
4020 with no sequence point. Will other languages need this to be more thorough?
4023 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4024 to catch quickly any cases where the caller uses the value
4025 and fails to set WANT_VALUE.
4027 If WANT_VALUE & 2 is set, this is a store into a call param on the
4028 stack, and block moves may need to be treated specially. */
4031 store_expr (tree exp, rtx target, int want_value)
4034 rtx alt_rtl = NULL_RTX;
4035 rtx mark = mark_queue ();
4036 int dont_return_target = 0;
4037 int dont_store_target = 0;
4039 if (VOID_TYPE_P (TREE_TYPE (exp)))
4041 /* C++ can generate ?: expressions with a throw expression in one
4042 branch and an rvalue in the other. Here, we resolve attempts to
4043 store the throw expression's nonexistent result. */
4046 expand_expr (exp, const0_rtx, VOIDmode, 0);
4049 if (TREE_CODE (exp) == COMPOUND_EXPR)
4051 /* Perform first part of compound expression, then assign from second
4053 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4054 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4056 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4058 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4060 /* For conditional expression, get safe form of the target. Then
4061 test the condition, doing the appropriate assignment on either
4062 side. This avoids the creation of unnecessary temporaries.
4063 For non-BLKmode, it is more efficient not to do this. */
4065 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4068 target = protect_from_queue (target, 1);
4070 do_pending_stack_adjust ();
4072 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4073 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4075 emit_jump_insn (gen_jump (lab2));
4078 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4083 return want_value & 1 ? target : NULL_RTX;
4085 else if (queued_subexp_p (target))
4086 /* If target contains a postincrement, let's not risk
4087 using it as the place to generate the rhs. */
4089 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4091 /* Expand EXP into a new pseudo. */
4092 temp = gen_reg_rtx (GET_MODE (target));
4093 temp = expand_expr (exp, temp, GET_MODE (target),
4095 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4098 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4100 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4102 /* If target is volatile, ANSI requires accessing the value
4103 *from* the target, if it is accessed. So make that happen.
4104 In no case return the target itself. */
4105 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4106 dont_return_target = 1;
4108 else if ((want_value & 1) != 0
4110 && ! MEM_VOLATILE_P (target)
4111 && GET_MODE (target) != BLKmode)
4112 /* If target is in memory and caller wants value in a register instead,
4113 arrange that. Pass TARGET as target for expand_expr so that,
4114 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4115 We know expand_expr will not use the target in that case.
4116 Don't do this if TARGET is volatile because we are supposed
4117 to write it and then read it. */
4119 temp = expand_expr (exp, target, GET_MODE (target),
4120 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4121 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4123 /* If TEMP is already in the desired TARGET, only copy it from
4124 memory and don't store it there again. */
4126 || (rtx_equal_p (temp, target)
4127 && ! side_effects_p (temp) && ! side_effects_p (target)))
4128 dont_store_target = 1;
4129 temp = copy_to_reg (temp);
4131 dont_return_target = 1;
4133 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4134 /* If this is a scalar in a register that is stored in a wider mode
4135 than the declared mode, compute the result into its declared mode
4136 and then convert to the wider mode. Our value is the computed
4139 rtx inner_target = 0;
4141 /* If we don't want a value, we can do the conversion inside EXP,
4142 which will often result in some optimizations. Do the conversion
4143 in two steps: first change the signedness, if needed, then
4144 the extend. But don't do this if the type of EXP is a subtype
4145 of something else since then the conversion might involve
4146 more than just converting modes. */
4147 if ((want_value & 1) == 0
4148 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4149 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4151 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4152 != SUBREG_PROMOTED_UNSIGNED_P (target))
4154 (lang_hooks.types.signed_or_unsigned_type
4155 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4157 exp = convert (lang_hooks.types.type_for_mode
4158 (GET_MODE (SUBREG_REG (target)),
4159 SUBREG_PROMOTED_UNSIGNED_P (target)),
4162 inner_target = SUBREG_REG (target);
4165 temp = expand_expr (exp, inner_target, VOIDmode,
4166 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4168 /* If TEMP is a MEM and we want a result value, make the access
4169 now so it gets done only once. Strictly speaking, this is
4170 only necessary if the MEM is volatile, or if the address
4171 overlaps TARGET. But not performing the load twice also
4172 reduces the amount of rtl we generate and then have to CSE. */
4173 if (MEM_P (temp) && (want_value & 1) != 0)
4174 temp = copy_to_reg (temp);
4176 /* If TEMP is a VOIDmode constant, use convert_modes to make
4177 sure that we properly convert it. */
4178 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4180 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4181 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4182 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4183 GET_MODE (target), temp,
4184 SUBREG_PROMOTED_UNSIGNED_P (target));
4187 convert_move (SUBREG_REG (target), temp,
4188 SUBREG_PROMOTED_UNSIGNED_P (target));
4190 /* If we promoted a constant, change the mode back down to match
4191 target. Otherwise, the caller might get confused by a result whose
4192 mode is larger than expected. */
4194 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4196 if (GET_MODE (temp) != VOIDmode)
4198 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4199 SUBREG_PROMOTED_VAR_P (temp) = 1;
4200 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4201 SUBREG_PROMOTED_UNSIGNED_P (target));
4204 temp = convert_modes (GET_MODE (target),
4205 GET_MODE (SUBREG_REG (target)),
4206 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4209 return want_value & 1 ? temp : NULL_RTX;
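/* Illustrative example (added commentary, mode names only for
   concreteness): on a target that promotes 'short' variables to
   full-width registers, an assignment to such a variable is computed
   in HImode and then sign- or zero-extended into the SImode hard
   register by the convert_move call above.  */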
4213 temp = expand_expr_real (exp, target, GET_MODE (target),
4215 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4217 /* Return TARGET if it's a specified hardware register.
4218 If TARGET is a volatile mem ref, either return TARGET
4219 or return a reg copied *from* TARGET; ANSI requires this.
4221 Otherwise, if TEMP is not TARGET, return TEMP
4222 if it is constant (for efficiency),
4223 or if we really want the correct value. */
4224 if (!(target && REG_P (target)
4225 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4226 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4227 && ! rtx_equal_p (temp, target)
4228 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4229 dont_return_target = 1;
4232 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4233 the same as that of TARGET, adjust the constant. This is needed, for
4234 example, in case it is a CONST_DOUBLE and we want only a word-sized
4236 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4237 && TREE_CODE (exp) != ERROR_MARK
4238 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4239 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4240 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4242 /* If value was not generated in the target, store it there.
4243 Convert the value to TARGET's type first if necessary and emit the
4244 pending increments that have been queued while expanding EXP.
4245 Note that we cannot emit the whole queue blindly because this will
4246 effectively disable the POST_INC optimization later.
4248 If TEMP and TARGET compare equal according to rtx_equal_p, but
4249 one or both of them are volatile memory refs, we have to distinguish
4251 - expand_expr has used TARGET. In this case, we must not generate
4252 another copy. This can be detected by TARGET being equal according
4254 - expand_expr has not used TARGET - that means that the source just
4255 happens to have the same RTX form. Since temp will have been created
4256 by expand_expr, it will compare unequal according to ==.
4257 We must generate a copy in this case, to reach the correct number
4258 of volatile memory references. */
4260 if ((! rtx_equal_p (temp, target)
4261 || (temp != target && (side_effects_p (temp)
4262 || side_effects_p (target))))
4263 && TREE_CODE (exp) != ERROR_MARK
4264 && ! dont_store_target
4265 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4266 but TARGET is not valid memory reference, TEMP will differ
4267 from TARGET although it is really the same location. */
4268 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4269 /* If there's nothing to copy, don't bother. Don't call expr_size
4270 unless necessary, because some front ends' (C++) expr_size hook
4271 aborts on objects that are not supposed to be bit-copied or
4273 && expr_size (exp) != const0_rtx)
4275 emit_insns_enqueued_after_mark (mark);
4276 target = protect_from_queue (target, 1);
4277 temp = protect_from_queue (temp, 0);
4278 if (GET_MODE (temp) != GET_MODE (target)
4279 && GET_MODE (temp) != VOIDmode)
4281 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4282 if (dont_return_target)
4284 /* In this case, we will return TEMP,
4285 so make sure it has the proper mode.
4286 But don't forget to store the value into TARGET. */
4287 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4288 emit_move_insn (target, temp);
4291 convert_move (target, temp, unsignedp);
4294 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4296 /* Handle copying a string constant into an array. The string
4297 constant may be shorter than the array. So copy just the string's
4298 actual length, and clear the rest. First get the size of the data
4299 type of the string, which is actually the size of the target. */
4300 rtx size = expr_size (exp);
4302 if (GET_CODE (size) == CONST_INT
4303 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4304 emit_block_move (target, temp, size,
4306 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4309 /* Compute the size of the data to copy from the string. */
4311 = size_binop (MIN_EXPR,
4312 make_tree (sizetype, size),
4313 size_int (TREE_STRING_LENGTH (exp)));
4315 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4317 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4320 /* Copy that much. */
4321 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4322 TYPE_UNSIGNED (sizetype));
4323 emit_block_move (target, temp, copy_size_rtx,
4325 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4327 /* Figure out how much is left in TARGET that we have to clear.
4328 Do all calculations in ptr_mode. */
4329 if (GET_CODE (copy_size_rtx) == CONST_INT)
4331 size = plus_constant (size, -INTVAL (copy_size_rtx));
4332 target = adjust_address (target, BLKmode,
4333 INTVAL (copy_size_rtx));
4337 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4338 copy_size_rtx, NULL_RTX, 0,
4341 #ifdef POINTERS_EXTEND_UNSIGNED
4342 if (GET_MODE (copy_size_rtx) != Pmode)
4343 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4344 TYPE_UNSIGNED (sizetype));
4347 target = offset_address (target, copy_size_rtx,
4348 highest_pow2_factor (copy_size));
4349 label = gen_label_rtx ();
4350 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4351 GET_MODE (size), 0, label);
4354 if (size != const0_rtx)
4355 clear_storage (target, size);
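/* Illustrative example (added commentary): for

     char buf[8] = "hi";

   the STRING_CST occupies 3 bytes (including the terminating NUL),
   so the code above block-copies those 3 bytes into BUF and then
   clears the remaining 5.  */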
4361 /* Handle calls that return values in multiple non-contiguous locations.
4362 The Irix 6 ABI has examples of this. */
4363 else if (GET_CODE (target) == PARALLEL)
4364 emit_group_load (target, temp, TREE_TYPE (exp),
4365 int_size_in_bytes (TREE_TYPE (exp)));
4366 else if (GET_MODE (temp) == BLKmode)
4367 emit_block_move (target, temp, expr_size (exp),
4369 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4372 temp = force_operand (temp, target);
4374 emit_move_insn (target, temp);
4378 /* If we don't want a value, return NULL_RTX. */
4379 if ((want_value & 1) == 0)
4382 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4383 ??? The latter test doesn't seem to make sense. */
4384 else if (dont_return_target && !MEM_P (temp))
4387 /* Return TARGET itself if it is a hard register. */
4388 else if ((want_value & 1) != 0
4389 && GET_MODE (target) != BLKmode
4390 && ! (REG_P (target)
4391 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4392 return copy_to_reg (target);
4398 /* Examine CTOR. Discover how many scalar fields are set to nonzero
4399 values and place the count in *P_NZ_ELTS. Discover how many scalar
4400 fields are set to non-constant values and place the count in *P_NC_ELTS. */
4403 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4404 HOST_WIDE_INT *p_nc_elts)
4406 HOST_WIDE_INT nz_elts, nc_elts;
4412 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4414 tree value = TREE_VALUE (list);
4415 tree purpose = TREE_PURPOSE (list);
4419 if (TREE_CODE (purpose) == RANGE_EXPR)
4421 tree lo_index = TREE_OPERAND (purpose, 0);
4422 tree hi_index = TREE_OPERAND (purpose, 1);
4424 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4425 mult = (tree_low_cst (hi_index, 1)
4426 - tree_low_cst (lo_index, 1) + 1);
4429 switch (TREE_CODE (value))
4433 HOST_WIDE_INT nz = 0, nc = 0;
4434 categorize_ctor_elements_1 (value, &nz, &nc);
4435 nz_elts += mult * nz;
4436 nc_elts += mult * nc;
4442 if (!initializer_zerop (value))
4446 if (!initializer_zerop (TREE_REALPART (value)))
4448 if (!initializer_zerop (TREE_IMAGPART (value)))
4454 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4455 if (!initializer_zerop (TREE_VALUE (v)))
4462 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4468 *p_nz_elts += nz_elts;
4469 *p_nc_elts += nc_elts;
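/* Illustrative example (added commentary): for the constructor of

     int v[4] = { 1, 0, 2, 0 };

   this computes *P_NZ_ELTS == 2 (two nonzero scalars) and
   *P_NC_ELTS == 0 (every element is constant).  */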
4473 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4474 HOST_WIDE_INT *p_nc_elts)
4478 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4481 /* Count the number of scalars in TYPE. Return -1 on overflow or
4485 count_type_elements (tree type)
4487 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4488 switch (TREE_CODE (type))
4492 tree telts = array_type_nelts (type);
4493 if (telts && host_integerp (telts, 1))
4495 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4496 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4499 else if (max / n > m)
4507 HOST_WIDE_INT n = 0, t;
4510 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4511 if (TREE_CODE (f) == FIELD_DECL)
4513 t = count_type_elements (TREE_TYPE (f));
4523 case QUAL_UNION_TYPE:
4525 /* Ho hum. How in the world do we guess here? Clearly it isn't
4526 right to count the fields. Guess based on the number of words. */
4527 HOST_WIDE_INT n = int_size_in_bytes (type);
4530 return n / UNITS_PER_WORD;
4537 /* ??? This is broken. We should encode the vector width in the tree. */
4538 return GET_MODE_NUNITS (TYPE_MODE (type));
4547 case REFERENCE_TYPE:
4561 /* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4564 mostly_zeros_p (tree exp)
4566 if (TREE_CODE (exp) == CONSTRUCTOR)
4569 HOST_WIDE_INT nz_elts, nc_elts, elts;
4571 /* If there are no ranges of true bits, it is all zero. */
4572 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4573 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4575 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4576 elts = count_type_elements (TREE_TYPE (exp));
4578 return nz_elts < elts / 4;
4581 return initializer_zerop (exp);
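/* Illustrative example (added commentary): for

     int a[8] = { 5 };

   there is 1 nonzero element against the 8 counted by
   count_type_elements, and 1 < 8/4, so the initializer counts as
   mostly zero and callers will clear the whole array first.  */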
4584 /* Helper function for store_constructor.
4585 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4586 TYPE is the type of the CONSTRUCTOR, not the element type.
4587 CLEARED is as for store_constructor.
4588 ALIAS_SET is the alias set to use for any stores.
4590 This provides a recursive shortcut back to store_constructor when it isn't
4591 necessary to go through store_field. This is so that we can pass through
4592 the cleared field to let store_constructor know that we may not have to
4593 clear a substructure if the outer structure has already been cleared. */
4596 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4597 HOST_WIDE_INT bitpos, enum machine_mode mode,
4598 tree exp, tree type, int cleared, int alias_set)
4600 if (TREE_CODE (exp) == CONSTRUCTOR
4601 /* We can only call store_constructor recursively if the size and
4602 bit position are on a byte boundary. */
4603 && bitpos % BITS_PER_UNIT == 0
4604 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4605 /* If we have a nonzero bitpos for a register target, then we just
4606 let store_field do the bitfield handling. This is unlikely to
4607 generate unnecessary clear instructions anyway. */
4608 && (bitpos == 0 || MEM_P (target)))
4612 = adjust_address (target,
4613 GET_MODE (target) == BLKmode
4615 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4616 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4619 /* Update the alias set, if required. */
4620 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4621 && MEM_ALIAS_SET (target) != 0)
4623 target = copy_rtx (target);
4624 set_mem_alias_set (target, alias_set);
4627 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4630 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4634 /* Store the value of constructor EXP into the rtx TARGET.
4635 TARGET is either a REG or a MEM; we know it cannot conflict, since
4636 safe_from_p has been called.
4637 CLEARED is true if TARGET is known to have been zeroed.
4638 SIZE is the number of bytes of TARGET we are allowed to modify: this
4639 may not be the same as the size of EXP if we are assigning to a field
4640 which has been packed to exclude padding bits. */
4643 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4645 tree type = TREE_TYPE (exp);
4646 #ifdef WORD_REGISTER_OPERATIONS
4647 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4650 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4651 || TREE_CODE (type) == QUAL_UNION_TYPE)
4655 /* If size is zero or the target is already cleared, do nothing. */
4656 if (size == 0 || cleared)
4658 /* We either clear the aggregate or indicate the value is dead. */
4659 else if ((TREE_CODE (type) == UNION_TYPE
4660 || TREE_CODE (type) == QUAL_UNION_TYPE)
4661 && ! CONSTRUCTOR_ELTS (exp))
4662 /* If the constructor is empty, clear the union. */
4664 clear_storage (target, expr_size (exp));
4668 /* If we are building a static constructor into a register,
4669 set the initial value as zero so we can fold the value into
4670 a constant. But if more than one register is involved,
4671 this probably loses. */
4672 else if (REG_P (target) && TREE_STATIC (exp)
4673 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4675 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4679 /* If the constructor has fewer fields than the structure
4680 or if we are initializing the structure to mostly zeros,
4681 clear the whole structure first. Don't do this if TARGET is a
4682 register whose mode size isn't equal to SIZE since clear_storage
4683 can't handle this case. */
4685 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4686 || mostly_zeros_p (exp))
4688 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4691 rtx xtarget = target;
4693 if (readonly_fields_p (type))
4695 xtarget = copy_rtx (xtarget);
4696 RTX_UNCHANGING_P (xtarget) = 1;
4699 clear_storage (xtarget, GEN_INT (size));
4704 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4706 /* Store each element of the constructor into
4707 the corresponding field of TARGET. */
4709 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4711 tree field = TREE_PURPOSE (elt);
4712 tree value = TREE_VALUE (elt);
4713 enum machine_mode mode;
4714 HOST_WIDE_INT bitsize;
4715 HOST_WIDE_INT bitpos = 0;
4717 rtx to_rtx = target;
4719 /* Just ignore missing fields.
4720 We cleared the whole structure, above,
4721 if any fields are missing. */
4725 if (cleared && initializer_zerop (value))
4728 if (host_integerp (DECL_SIZE (field), 1))
4729 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4733 mode = DECL_MODE (field);
4734 if (DECL_BIT_FIELD (field))
4737 offset = DECL_FIELD_OFFSET (field);
4738 if (host_integerp (offset, 0)
4739 && host_integerp (bit_position (field), 0))
4741 bitpos = int_bit_position (field);
4745 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4752 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4753 make_tree (TREE_TYPE (exp),
4756 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4757 if (!MEM_P (to_rtx))
4760 #ifdef POINTERS_EXTEND_UNSIGNED
4761 if (GET_MODE (offset_rtx) != Pmode)
4762 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4764 if (GET_MODE (offset_rtx) != ptr_mode)
4765 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4768 to_rtx = offset_address (to_rtx, offset_rtx,
4769 highest_pow2_factor (offset));
4772 if (TREE_READONLY (field))
4775 to_rtx = copy_rtx (to_rtx);
4777 RTX_UNCHANGING_P (to_rtx) = 1;
4780 #ifdef WORD_REGISTER_OPERATIONS
4781 /* If this initializes a field that is smaller than a word, at the
4782 start of a word, try to widen it to a full word.
4783 This special case allows us to output C++ member function
4784 initializations in a form that the optimizers can understand. */
4786 && bitsize < BITS_PER_WORD
4787 && bitpos % BITS_PER_WORD == 0
4788 && GET_MODE_CLASS (mode) == MODE_INT
4789 && TREE_CODE (value) == INTEGER_CST
4791 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4793 tree type = TREE_TYPE (value);
4795 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4797 type = lang_hooks.types.type_for_size
4798 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4799 value = convert (type, value);
4802 if (BYTES_BIG_ENDIAN)
4804 = fold (build (LSHIFT_EXPR, type, value,
4805 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4806 bitsize = BITS_PER_WORD;
4811 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4812 && DECL_NONADDRESSABLE_P (field))
4814 to_rtx = copy_rtx (to_rtx);
4815 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4818 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4819 value, type, cleared,
4820 get_alias_set (TREE_TYPE (field)));
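/* Illustrative example (added commentary): a constructor such as

     struct S { int a; short b; } s = { 1, 2 };

   is expanded by the field loop above one store at a time; on a
   WORD_REGISTER_OPERATIONS target the sub-word constant 2, sitting
   at a word boundary, would first be widened to a full-word store
   as described in the special case above.  */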
4823 else if (TREE_CODE (type) == ARRAY_TYPE
4824 || TREE_CODE (type) == VECTOR_TYPE)
4830 tree elttype = TREE_TYPE (type);
4832 HOST_WIDE_INT minelt = 0;
4833 HOST_WIDE_INT maxelt = 0;
4837 unsigned n_elts = 0;
4839 if (TREE_CODE (type) == ARRAY_TYPE)
4840 domain = TYPE_DOMAIN (type);
4842 /* Vectors do not have domains; look up the domain of
4843 the array embedded in the debug representation type.
4844 FIXME Would probably be more efficient to treat vectors
4845 separately from arrays. */
4847 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4848 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4849 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4851 enum machine_mode mode = GET_MODE (target);
4853 icode = (int) vec_init_optab->handlers[mode].insn_code;
4854 if (icode != CODE_FOR_nothing)
4858 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4859 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4860 vector = alloca (n_elts);
4861 for (i = 0; i < n_elts; i++)
4862 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4867 const_bounds_p = (TYPE_MIN_VALUE (domain)
4868 && TYPE_MAX_VALUE (domain)
4869 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4870 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4872 /* If we have constant bounds for the range of the type, get them. */
4875 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4876 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4879 /* If the constructor has fewer elements than the array,
4880 clear the whole array first. Similarly if this is a
4881 static constructor of a non-BLKmode object. */
4882 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4886 HOST_WIDE_INT count = 0, zero_count = 0;
4887 need_to_clear = ! const_bounds_p;
4889 /* This loop is a more accurate version of the loop in
4890 mostly_zeros_p (it handles RANGE_EXPR in an index).
4891 It is also needed to check for missing elements. */
4892 for (elt = CONSTRUCTOR_ELTS (exp);
4893 elt != NULL_TREE && ! need_to_clear;
4894 elt = TREE_CHAIN (elt))
4896 tree index = TREE_PURPOSE (elt);
4897 HOST_WIDE_INT this_node_count;
4899 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4901 tree lo_index = TREE_OPERAND (index, 0);
4902 tree hi_index = TREE_OPERAND (index, 1);
4904 if (! host_integerp (lo_index, 1)
4905 || ! host_integerp (hi_index, 1))
4911 this_node_count = (tree_low_cst (hi_index, 1)
4912 - tree_low_cst (lo_index, 1) + 1);
4915 this_node_count = 1;
4917 count += this_node_count;
4918 if (mostly_zeros_p (TREE_VALUE (elt)))
4919 zero_count += this_node_count;
4922 /* Clear the entire array first if there are any missing elements,
4923 or if the incidence of zero elements is >= 75%. */
4925 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4929 if (need_to_clear && size > 0 && !vector)
4934 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4936 clear_storage (target, GEN_INT (size));
4940 else if (REG_P (target))
4941 /* Inform later passes that the old value is dead. */
4942 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4944 /* Store each element of the constructor into
4945 the corresponding element of TARGET, determined
4946 by counting the elements. */
4947 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4949 elt = TREE_CHAIN (elt), i++)
4951 enum machine_mode mode;
4952 HOST_WIDE_INT bitsize;
4953 HOST_WIDE_INT bitpos;
4955 tree value = TREE_VALUE (elt);
4956 tree index = TREE_PURPOSE (elt);
4957 rtx xtarget = target;
4959 if (cleared && initializer_zerop (value))
4962 unsignedp = TYPE_UNSIGNED (elttype);
4963 mode = TYPE_MODE (elttype);
4964 if (mode == BLKmode)
4965 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4966 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4969 bitsize = GET_MODE_BITSIZE (mode);
4971 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4973 tree lo_index = TREE_OPERAND (index, 0);
4974 tree hi_index = TREE_OPERAND (index, 1);
4975 rtx index_r, pos_rtx;
4976 HOST_WIDE_INT lo, hi, count;
4982 /* If the range is constant and "small", unroll the loop. */
4984 && host_integerp (lo_index, 0)
4985 && host_integerp (hi_index, 0)
4986 && (lo = tree_low_cst (lo_index, 0),
4987 hi = tree_low_cst (hi_index, 0),
4988 count = hi - lo + 1,
4991 || (host_integerp (TYPE_SIZE (elttype), 1)
4992 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4995 lo -= minelt; hi -= minelt;
4996 for (; lo <= hi; lo++)
4998 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5001 && !MEM_KEEP_ALIAS_SET_P (target)
5002 && TREE_CODE (type) == ARRAY_TYPE
5003 && TYPE_NONALIASED_COMPONENT (type))
5005 target = copy_rtx (target);
5006 MEM_KEEP_ALIAS_SET_P (target) = 1;
5009 store_constructor_field
5010 (target, bitsize, bitpos, mode, value, type, cleared,
5011 get_alias_set (elttype));
5016 rtx loop_start = gen_label_rtx ();
5017 rtx loop_end = gen_label_rtx ();
5020 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5021 unsignedp = TYPE_UNSIGNED (domain);
5023 index = build_decl (VAR_DECL, NULL_TREE, domain);
5026 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5028 SET_DECL_RTL (index, index_r);
5029 store_expr (lo_index, index_r, 0);
5031 /* Build the head of the loop. */
5032 do_pending_stack_adjust ();
5034 emit_label (loop_start);
5036 /* Assign value to element index. */
5038 = convert (ssizetype,
5039 fold (build (MINUS_EXPR, TREE_TYPE (index),
5040 index, TYPE_MIN_VALUE (domain))));
5041 position = size_binop (MULT_EXPR, position,
5043 TYPE_SIZE_UNIT (elttype)));
5045 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5046 xtarget = offset_address (target, pos_rtx,
5047 highest_pow2_factor (position));
5048 xtarget = adjust_address (xtarget, mode, 0);
5049 if (TREE_CODE (value) == CONSTRUCTOR)
5050 store_constructor (value, xtarget, cleared,
5051 bitsize / BITS_PER_UNIT);
5053 store_expr (value, xtarget, 0);
5055 /* Generate a conditional jump to exit the loop. */
5056 exit_cond = build (LT_EXPR, integer_type_node,
5058 jumpif (exit_cond, loop_end);
5060 /* Update the loop counter, and jump to the head of
5062 expand_increment (build (PREINCREMENT_EXPR,
5064 index, integer_one_node), 0, 0);
5065 emit_jump (loop_start);
5067 /* Build the end of the loop. */
5068 emit_label (loop_end);
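/* Illustrative example (added commentary): a GNU C range designator
   such as

     int a[100] = { [0 ... 9] = 1 };

   yields a RANGE_EXPR index.  A constant range judged small enough
   is unrolled by the first branch above; otherwise the runtime loop
   just built performs the stores.  */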
5071 else if ((index != 0 && ! host_integerp (index, 0))
5072 || ! host_integerp (TYPE_SIZE (elttype), 1))
5080 index = ssize_int (1);
5083 index = convert (ssizetype,
5084 fold (build (MINUS_EXPR, index,
5085 TYPE_MIN_VALUE (domain))));
5087 position = size_binop (MULT_EXPR, index,
5089 TYPE_SIZE_UNIT (elttype)));
5090 xtarget = offset_address (target,
5091 expand_expr (position, 0, VOIDmode, 0),
5092 highest_pow2_factor (position));
5093 xtarget = adjust_address (xtarget, mode, 0);
5094 store_expr (value, xtarget, 0);
5101 pos = tree_low_cst (index, 0) - minelt;
5104 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5109 bitpos = ((tree_low_cst (index, 0) - minelt)
5110 * tree_low_cst (TYPE_SIZE (elttype), 1));
5112 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5114 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5115 && TREE_CODE (type) == ARRAY_TYPE
5116 && TYPE_NONALIASED_COMPONENT (type))
5118 target = copy_rtx (target);
5119 MEM_KEEP_ALIAS_SET_P (target) = 1;
5121 store_constructor_field (target, bitsize, bitpos, mode, value,
5122 type, cleared, get_alias_set (elttype));
5127 emit_insn (GEN_FCN (icode) (target,
5128 gen_rtx_PARALLEL (GET_MODE (target),
5129 gen_rtvec_v (n_elts, vector))));
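/* Illustrative example (added commentary): on a target whose
   vec_init pattern is available, a four-element vector constructor
   { a, b, c, d } has its elements expanded into VECTOR above and is
   then emitted as a single insn taking a PARALLEL of the four
   values.  */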
5133 /* Set constructor assignments. */
5134 else if (TREE_CODE (type) == SET_TYPE)
5136 tree elt = CONSTRUCTOR_ELTS (exp);
5137 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5138 tree domain = TYPE_DOMAIN (type);
5139 tree domain_min, domain_max, bitlength;
5141 /* The default implementation strategy is to extract the constant
5142 parts of the constructor, use that to initialize the target,
5143 and then "or" in whatever non-constant ranges we need in addition.
5145 If a large set is all zero or all ones, it is
5146 probably better to set it using memset.
5147 Also, if a large set has just a single range, it may also be
5148 better to first clear the whole set (using
5149 memset) and then set the bits we want. */
5151 /* Check for all zeros. */
5152 if (elt == NULL_TREE && size > 0)
5155 clear_storage (target, GEN_INT (size));
5159 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5160 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5161 bitlength = size_binop (PLUS_EXPR,
5162 size_diffop (domain_max, domain_min),
5165 nbits = tree_low_cst (bitlength, 1);
5167 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5168 are "complicated" (more than one range), initialize (the
5169 constant parts) by copying from a constant. */
5170 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5171 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5173 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5174 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5175 char *bit_buffer = alloca (nbits);
5176 HOST_WIDE_INT word = 0;
5177 unsigned int bit_pos = 0;
5178 unsigned int ibit = 0;
5179 unsigned int offset = 0; /* In bytes from beginning of set. */
5181 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5184 if (bit_buffer[ibit])
5186 if (BYTES_BIG_ENDIAN)
5187 word |= (1 << (set_word_size - 1 - bit_pos));
5189 word |= 1 << bit_pos;
5193 if (bit_pos >= set_word_size || ibit == nbits)
5195 if (word != 0 || ! cleared)
5197 rtx datum = gen_int_mode (word, mode);
5200 /* The assumption here is that it is safe to use
5201 XEXP if the set is multi-word, but not if
5202 it's single-word. */
5204 to_rtx = adjust_address (target, mode, offset);
5205 else if (offset == 0)
5209 emit_move_insn (to_rtx, datum);
5216 offset += set_word_size / BITS_PER_UNIT;
5221 /* Don't bother clearing storage if the set is all ones. */
5222 if (TREE_CHAIN (elt) != NULL_TREE
5223 || (TREE_PURPOSE (elt) == NULL_TREE
5225 : ( ! host_integerp (TREE_VALUE (elt), 0)
5226 || ! host_integerp (TREE_PURPOSE (elt), 0)
5227 || (tree_low_cst (TREE_VALUE (elt), 0)
5228 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5229 != (HOST_WIDE_INT) nbits))))
5230 clear_storage (target, expr_size (exp));
5232 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5234 /* Start of range of element or NULL. */
5235 tree startbit = TREE_PURPOSE (elt);
5236 /* End of range of element, or element value. */
5237 tree endbit = TREE_VALUE (elt);
5238 HOST_WIDE_INT startb, endb;
5239 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5241 bitlength_rtx = expand_expr (bitlength,
5242 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5244 /* Handle non-range tuple element like [ expr ]. */
5245 if (startbit == NULL_TREE)
5247 startbit = save_expr (endbit);
5251 startbit = convert (sizetype, startbit);
5252 endbit = convert (sizetype, endbit);
5253 if (! integer_zerop (domain_min))
5255 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5256 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5258 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5259 EXPAND_CONST_ADDRESS);
5260 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5261 EXPAND_CONST_ADDRESS);
5267 ((build_qualified_type (lang_hooks.types.type_for_mode
5268 (GET_MODE (target), 0),
5271 emit_move_insn (targetx, target);
5274 else if (MEM_P (target))
5279 /* Optimization: If startbit and endbit are constants divisible
5280 by BITS_PER_UNIT, call memset instead. */
5281 if (TREE_CODE (startbit) == INTEGER_CST
5282 && TREE_CODE (endbit) == INTEGER_CST
5283 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5284 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5286 emit_library_call (memset_libfunc, LCT_NORMAL,
5288 plus_constant (XEXP (targetx, 0),
5289 startb / BITS_PER_UNIT),
5291 constm1_rtx, TYPE_MODE (integer_type_node),
5292 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5293 TYPE_MODE (sizetype));
5296 emit_library_call (setbits_libfunc, LCT_NORMAL,
5297 VOIDmode, 4, XEXP (targetx, 0),
5298 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5299 startbit_rtx, TYPE_MODE (sizetype),
5300 endbit_rtx, TYPE_MODE (sizetype));
5303 emit_move_insn (target, targetx);
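/* Illustrative example (added commentary): in a language with set
   types (e.g. CHILL or Pascal), a constant byte-aligned range such
   as [8 .. 15] over an already-cleared set is handled by the memset
   call above, storing -1 into the single byte it spans; unaligned
   or variable ranges fall back to the setbits library routine.  */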
5311 /* Store the value of EXP (an expression tree)
5312 into a subfield of TARGET which has mode MODE and occupies
5313 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5314 If MODE is VOIDmode, it means that we are storing into a bit-field.
5316 If VALUE_MODE is VOIDmode, return nothing in particular.
5317 UNSIGNEDP is not used in this case.
5319 Otherwise, return an rtx for the value stored. This rtx
5320 has mode VALUE_MODE if that is convenient to do.
5321 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5323 TYPE is the type of the underlying object,
5325 ALIAS_SET is the alias set for the destination. This value will
5326 (in general) be different from that for TARGET, since TARGET is a
5327 reference to the containing structure. */
5330 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5331 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5332 int unsignedp, tree type, int alias_set)
5334 HOST_WIDE_INT width_mask = 0;
5336 if (TREE_CODE (exp) == ERROR_MARK)
5339 /* If we have nothing to store, do nothing unless the expression has
5342 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5343 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5344 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5346 /* If we are storing into an unaligned field of an aligned union that is
5347 in a register, we may have the mode of TARGET being an integer mode but
5348 MODE == BLKmode. In that case, get an aligned object whose size and
5349 alignment are the same as TARGET and store TARGET into it (we can avoid
5350 the store if the field being stored is the entire width of TARGET). Then
5351 call ourselves recursively to store the field into a BLKmode version of
5352 that object. Finally, load from the object into TARGET. This is not
5353 very efficient in general, but should only be slightly more expensive
5354 than the otherwise-required unaligned accesses. Perhaps this can be
5355 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5356 twice, once with emit_move_insn and once via store_field. */
5359 && (REG_P (target) || GET_CODE (target) == SUBREG))
5361 rtx object = assign_temp (type, 0, 1, 1);
5362 rtx blk_object = adjust_address (object, BLKmode, 0);
5364 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5365 emit_move_insn (object, target);
5367 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5370 emit_move_insn (target, object);
5372 /* We want to return the BLKmode version of the data. */
5376 if (GET_CODE (target) == CONCAT)
5378 /* We're storing into a struct containing a single __complex. */
5382 return store_expr (exp, target, value_mode != VOIDmode);
5385 /* If the structure is in a register or if the component
5386 is a bit field, we cannot use addressing to access it.
5387 Use bit-field techniques or SUBREG to store in it. */
5389 if (mode == VOIDmode
5390 || (mode != BLKmode && ! direct_store[(int) mode]
5391 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5392 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5394 || GET_CODE (target) == SUBREG
5395 /* If the field isn't aligned enough to store as an ordinary memref,
5396 store it as a bit field. */
5398 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5399 || bitpos % GET_MODE_ALIGNMENT (mode))
5400 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5401 || (bitpos % BITS_PER_UNIT != 0)))
5402 /* If the RHS and field are a constant size and the size of the
5403 RHS isn't the same size as the bitfield, we must use bitfield
5406 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5407 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5409 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5411 /* If BITSIZE is narrower than the size of the type of EXP
5412 we will be narrowing TEMP. Normally, what's wanted are the
5413 low-order bits. However, if EXP's type is a record and this is a
5414 big-endian machine, we want the upper BITSIZE bits. */
5415 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5416 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5417 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5418 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5419 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5423 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5425 if (mode != VOIDmode && mode != BLKmode
5426 && mode != TYPE_MODE (TREE_TYPE (exp)))
5427 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5429 /* If the modes of TARGET and TEMP are both BLKmode, both
5430 must be in memory and BITPOS must be aligned on a byte
5431 boundary. If so, we simply do a block copy. */
5432 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5434 if (!MEM_P (target) || !MEM_P (temp)
5435 || bitpos % BITS_PER_UNIT != 0)
5438 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5439 emit_block_move (target, temp,
5440 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5444 return value_mode == VOIDmode ? const0_rtx : target;
5447 /* Store the value in the bitfield. */
5448 store_bit_field (target, bitsize, bitpos, mode, temp,
5449 int_size_in_bytes (type));
5451 if (value_mode != VOIDmode)
5453 /* The caller wants an rtx for the value.
5454 If possible, avoid refetching from the bitfield itself. */
5456 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5459 enum machine_mode tmode;
5461 tmode = GET_MODE (temp);
5462 if (tmode == VOIDmode)
5466 return expand_and (tmode, temp,
5467 gen_int_mode (width_mask, tmode),
5470 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5471 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5472 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5475 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5476 NULL_RTX, value_mode, VOIDmode,
5477 int_size_in_bytes (type));
5483 rtx addr = XEXP (target, 0);
5484 rtx to_rtx = target;
5486 /* If a value is wanted, it must be the lhs;
5487 so make the address stable for multiple use. */
5489 if (value_mode != VOIDmode && !REG_P (addr)
5490 && ! CONSTANT_ADDRESS_P (addr)
5491 /* A frame-pointer reference is already stable. */
5492 && ! (GET_CODE (addr) == PLUS
5493 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5494 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5495 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5496 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5498 /* Now build a reference to just the desired component. */
5500 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5502 if (to_rtx == target)
5503 to_rtx = copy_rtx (to_rtx);
5505 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5506 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5507 set_mem_alias_set (to_rtx, alias_set);
5509 return store_expr (exp, to_rtx, value_mode != VOIDmode);
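/* Illustrative example (added commentary): an assignment to a
   bit-field, e.g.

     struct { unsigned f : 5; } x;  ...  x.f = v;

   goes through the bit-field path above: MODE is VOIDmode, V is
   expanded and stored with store_bit_field, and if a value is
   wanted it is re-derived by masking with WIDTH_MASK (here 0x1f)
   or by re-extracting the field.  */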
5513 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5514 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5515 codes and find the ultimate containing object, which we return.
5517 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5518 bit position, and *PUNSIGNEDP to the signedness of the field.
5519 If the position of the field is variable, we store a tree
5520 giving the variable offset (in units) in *POFFSET.
5521 This offset is in addition to the bit position.
5522 If the position is not variable, we store 0 in *POFFSET.
5524 If any of the extraction expressions is volatile,
5525 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5527 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5528 is a mode that can be used to access the field. In that case, *PBITSIZE
5531 If the field describes a variable-sized object, *PMODE is set to
5532 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5533 this case, but the address of the object can be found. */
5536 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5537 HOST_WIDE_INT *pbitpos, tree *poffset,
5538 enum machine_mode *pmode, int *punsignedp,
5542 enum machine_mode mode = VOIDmode;
5543 tree offset = size_zero_node;
5544 tree bit_offset = bitsize_zero_node;
5547 /* First get the mode, signedness, and size. We do this from just the
5548 outermost expression. */
5549 if (TREE_CODE (exp) == COMPONENT_REF)
5551 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5552 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5553 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5555 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5557 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5559 size_tree = TREE_OPERAND (exp, 1);
5560 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5564 mode = TYPE_MODE (TREE_TYPE (exp));
5565 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5567 if (mode == BLKmode)
5568 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5570 *pbitsize = GET_MODE_BITSIZE (mode);
5575 if (! host_integerp (size_tree, 1))
5576 mode = BLKmode, *pbitsize = -1;
5578 *pbitsize = tree_low_cst (size_tree, 1);
5581 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5582 and find the ultimate containing object. */
5585 if (TREE_CODE (exp) == BIT_FIELD_REF)
5586 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5587 else if (TREE_CODE (exp) == COMPONENT_REF)
5589 tree field = TREE_OPERAND (exp, 1);
5590 tree this_offset = component_ref_field_offset (exp);
5592 /* If this field hasn't been filled in yet, don't go
5593 past it. This should only happen when folding expressions
5594 made during type construction. */
5595 if (this_offset == 0)
5598 offset = size_binop (PLUS_EXPR, offset, this_offset);
5599 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5600 DECL_FIELD_BIT_OFFSET (field));
5602 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5605 else if (TREE_CODE (exp) == ARRAY_REF
5606 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5608 tree index = TREE_OPERAND (exp, 1);
5609 tree low_bound = array_ref_low_bound (exp);
5610 tree unit_size = array_ref_element_size (exp);
5612 /* We assume all arrays have sizes that are a multiple of a byte.
5613 First subtract the lower bound, if any, in the type of the
5614 index, then convert to sizetype and multiply by the size of the
5616 if (! integer_zerop (low_bound))
5617 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5620 offset = size_binop (PLUS_EXPR, offset,
5621 size_binop (MULT_EXPR,
5622 convert (sizetype, index),
5626 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5627 conversions that don't change the mode, and all view conversions
5628 except those that need to "step up" the alignment. */
5629 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5630 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5631 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5632 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5634 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5635 < BIGGEST_ALIGNMENT)
5636 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5637 || TYPE_ALIGN_OK (TREE_TYPE
5638 (TREE_OPERAND (exp, 0))))))
5639 && ! ((TREE_CODE (exp) == NOP_EXPR
5640 || TREE_CODE (exp) == CONVERT_EXPR)
5641 && (TYPE_MODE (TREE_TYPE (exp))
5642 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5645 /* If any reference in the chain is volatile, the effect is volatile. */
5646 if (TREE_THIS_VOLATILE (exp))
5649 exp = TREE_OPERAND (exp, 0);
5652 /* If OFFSET is constant, see if we can return the whole thing as a
5653 constant bit position. Otherwise, split it up. */
5654 if (host_integerp (offset, 0)
5655 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5657 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5658 && host_integerp (tem, 0))
5659 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5661 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
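/* Illustrative example (added commentary): for the reference
   s.a[i], where field A starts at byte 4 of S and has 4-byte
   elements, this returns the containing object S with
   *PBITSIZE == 32, *PBITPOS == 32, and a variable *POFFSET of
   i * 4 units.  */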
5667 /* Return a tree of sizetype representing the size, in bytes, of the element
5668 of EXP, an ARRAY_REF. */
5671 array_ref_element_size (tree exp)
5673 tree aligned_size = TREE_OPERAND (exp, 3);
5674 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5676 /* If a size was specified in the ARRAY_REF, it's the size measured
5677 in alignment units of the element type. So multiply by that value. */
5679 return size_binop (MULT_EXPR, aligned_size,
5680 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5682 /* Otherwise, take the size from that of the element type. Substitute
5683 any PLACEHOLDER_EXPR that we have. */
5685 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5688 /* Return a tree representing the lower bound of the array mentioned in
5689 EXP, an ARRAY_REF. */
5692 array_ref_low_bound (tree exp)
5694 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5696 /* If a lower bound is specified in EXP, use it. */
5697 if (TREE_OPERAND (exp, 2))
5698 return TREE_OPERAND (exp, 2);
5700 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5701 substituting for a PLACEHOLDER_EXPR as needed. */
5702 if (domain_type && TYPE_MIN_VALUE (domain_type))
5703 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5705 /* Otherwise, return a zero of the appropriate type. */
5706 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5709 /* Return a tree representing the offset, in bytes, of the field referenced
5710 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5713 component_ref_field_offset (tree exp)
5715 tree aligned_offset = TREE_OPERAND (exp, 2);
5716 tree field = TREE_OPERAND (exp, 1);
5718 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5719 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5722 return size_binop (MULT_EXPR, aligned_offset,
5723 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5725 /* Otherwise, take the offset from that of the field. Substitute
5726 any PLACEHOLDER_EXPR that we have. */
5728 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5731 /* Return 1 if T is an expression that get_inner_reference handles. */
5734 handled_component_p (tree t)
5736 switch (TREE_CODE (t))
5741 case ARRAY_RANGE_REF:
5742 case NON_LVALUE_EXPR:
5743 case VIEW_CONVERT_EXPR:
5746 /* ??? Sure they are handled, but get_inner_reference may return
5747 a different PBITSIZE, depending upon whether the expression is
5748 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5751 return (TYPE_MODE (TREE_TYPE (t))
5752 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5759 /* Given an rtx VALUE that may contain additions and multiplications, return
5760 an equivalent value that just refers to a register, memory, or constant.
5761 This is done by generating instructions to perform the arithmetic and
5762 returning a pseudo-register containing the value.
5764 The returned value may be a REG, SUBREG, MEM or constant. */
5767 force_operand (rtx value, rtx target)
5770 /* Use subtarget as the target for operand 0 of a binary operation. */
5771 rtx subtarget = get_subtarget (target);
5772 enum rtx_code code = GET_CODE (value);
5774 /* Check for a subreg applied to an expression produced by the loop optimizer. */
5776 && !REG_P (SUBREG_REG (value))
5777 && !MEM_P (SUBREG_REG (value)))
5779 value = simplify_gen_subreg (GET_MODE (value),
5780 force_reg (GET_MODE (SUBREG_REG (value)),
5781 force_operand (SUBREG_REG (value),
5783 GET_MODE (SUBREG_REG (value)),
5784 SUBREG_BYTE (value));
5785 code = GET_CODE (value);
5788 /* Check for a PIC address load. */
5789 if ((code == PLUS || code == MINUS)
5790 && XEXP (value, 0) == pic_offset_table_rtx
5791 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5792 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5793 || GET_CODE (XEXP (value, 1)) == CONST))
5796 subtarget = gen_reg_rtx (GET_MODE (value));
5797 emit_move_insn (subtarget, value);
5801 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5804 target = gen_reg_rtx (GET_MODE (value));
5805 convert_move (target, force_operand (XEXP (value, 0), NULL),
5806 code == ZERO_EXTEND);
5810 if (ARITHMETIC_P (value))
5812 op2 = XEXP (value, 1);
5813 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5815 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5818 op2 = negate_rtx (GET_MODE (value), op2);
5821 /* Check for an addition with OP2 a constant integer and our first
5822 operand a PLUS of a virtual register and something else. In that
5823 case, we want to emit the sum of the virtual register and the
5824 constant first and then add the other value. This allows virtual
5825 register instantiation to simply modify the constant rather than
5826 creating another one around this addition. */
5827 if (code == PLUS && GET_CODE (op2) == CONST_INT
5828 && GET_CODE (XEXP (value, 0)) == PLUS
5829 && REG_P (XEXP (XEXP (value, 0), 0))
5830 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5831 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5833 rtx temp = expand_simple_binop (GET_MODE (value), code,
5834 XEXP (XEXP (value, 0), 0), op2,
5835 subtarget, 0, OPTAB_LIB_WIDEN);
5836 return expand_simple_binop (GET_MODE (value), code, temp,
5837 force_operand (XEXP (XEXP (value,
5839 target, 0, OPTAB_LIB_WIDEN);
5842 op1 = force_operand (XEXP (value, 0), subtarget);
5843 op2 = force_operand (op2, NULL_RTX);
5847 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5849 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5850 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5851 target, 1, OPTAB_LIB_WIDEN);
5853 return expand_divmod (0,
5854 FLOAT_MODE_P (GET_MODE (value))
5855 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5856 GET_MODE (value), op1, op2, target, 0);
5859 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5863 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5867 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5871 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5872 target, 0, OPTAB_LIB_WIDEN);
5875 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5876 target, 1, OPTAB_LIB_WIDEN);
5879 if (UNARY_P (value))
5881 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5882 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5885 #ifdef INSN_SCHEDULING
5886 /* On machines that have insn scheduling, we want all memory references to be
5887 explicit, so we need to deal with such paradoxical SUBREGs. */
5888 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5889 && (GET_MODE_SIZE (GET_MODE (value))
5890 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5892 = simplify_gen_subreg (GET_MODE (value),
5893 force_reg (GET_MODE (SUBREG_REG (value)),
5894 force_operand (SUBREG_REG (value),
5896 GET_MODE (SUBREG_REG (value)),
5897 SUBREG_BYTE (value));
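/* Illustrative example (added commentary): given
   VALUE == (plus (mult R1 4) R2), force_operand emits a multiply of
   R1 by 4 into a pseudo followed by an add of R2, and returns the
   pseudo holding the sum in place of the composite rtx.  */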
5903 /* Subroutine of expand_expr: return nonzero iff there is no way that
5904 EXP can reference X, which is being modified. TOP_P is nonzero if this
5905 call is going to be used to determine whether we need a temporary
5906 for EXP, as opposed to a recursive call to this function.
5908 It is always safe for this routine to return zero since it merely
5909 searches for optimization opportunities. */
5912 safe_from_p (rtx x, tree exp, int top_p)
5918 /* If EXP has varying size, we MUST use a target since we currently
5919 have no way of allocating temporaries of variable size
5920 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5921 So we assume here that something at a higher level has prevented a
5922 clash. This is somewhat bogus, but the best we can do. Only
5923 do this when X is BLKmode and when we are at the top level. */
5924 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5925 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5926 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5927 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5928 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5930 && GET_MODE (x) == BLKmode)
5931 /* If X is in the outgoing argument area, it is always safe. */
5933 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5934 || (GET_CODE (XEXP (x, 0)) == PLUS
5935 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5938 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5939 find the underlying pseudo. */
5940 if (GET_CODE (x) == SUBREG)
5943 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5947 /* Now look at our tree code and possibly recurse. */
5948 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5951 exp_rtl = DECL_RTL_IF_SET (exp);
5958 if (TREE_CODE (exp) == TREE_LIST)
5962 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5964 exp = TREE_CHAIN (exp);
5967 if (TREE_CODE (exp) != TREE_LIST)
5968 return safe_from_p (x, exp, 0);
5971 else if (TREE_CODE (exp) == ERROR_MARK)
5972 return 1; /* An already-visited SAVE_EXPR? */
5977 /* The only case we look at here is the DECL_INITIAL inside a
5979 return (TREE_CODE (exp) != DECL_EXPR
5980 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5981 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5982 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
5986 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5991 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5995 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5996 the expression. If it is set, we conflict iff we are that rtx or
5997 both are in memory. Otherwise, we check all operands of the
5998 expression recursively. */
6000 switch (TREE_CODE (exp))
6003 /* If the operand is static or we are static, we can't conflict.
6004 Likewise if we don't conflict with the operand at all. */
6005 if (staticp (TREE_OPERAND (exp, 0))
6006 || TREE_STATIC (exp)
6007 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6010 /* Otherwise, the only way this can conflict is if we are taking
6011 the address of a DECL whose address is part of X, which is
6013 exp = TREE_OPERAND (exp, 0);
6016 if (!DECL_RTL_SET_P (exp)
6017 || !MEM_P (DECL_RTL (exp)))
6020 exp_rtl = XEXP (DECL_RTL (exp), 0);
6026 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6027 get_alias_set (exp)))
6032 /* Assume that the call will clobber all hard registers and
6034 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6039 case WITH_CLEANUP_EXPR:
6040 case CLEANUP_POINT_EXPR:
6041 /* Lowered by gimplify.c. */
6045 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6048 /* The only operand we look at is operand 1. The rest aren't
6049 part of the expression. */
6050 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6056 /* If we have an rtx, we do not need to scan our operands. */
6060 nops = first_rtl_op (TREE_CODE (exp));
6061 for (i = 0; i < nops; i++)
6062 if (TREE_OPERAND (exp, i) != 0
6063 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6066 /* If this is a language-specific tree code, it may require
6067 special handling. */
6068 if ((unsigned int) TREE_CODE (exp)
6069 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6070 && !lang_hooks.safe_from_p (x, exp))
6074 /* If we have an rtl, find any enclosed object. Then see if we conflict
6078 if (GET_CODE (exp_rtl) == SUBREG)
6080 exp_rtl = SUBREG_REG (exp_rtl);
6082 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6086 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6087 are memory and they conflict. */
6088 return ! (rtx_equal_p (x, exp_rtl)
6089 || (MEM_P (x) && MEM_P (exp_rtl)
6090 && true_dependence (exp_rtl, VOIDmode, x,
6091 rtx_addr_varies_p)));
6094 /* If we reach here, it is safe. */
6098 /* Subroutine of expand_expr: return rtx if EXP is a
6099 variable or parameter; else return 0. */
6105 switch (TREE_CODE (exp))
6109 return DECL_RTL (exp);
6115 /* Return the highest power of two that EXP is known to be a multiple of.
6116 This is used in updating alignment of MEMs in array references. */
6118 static unsigned HOST_WIDE_INT
6119 highest_pow2_factor (tree exp)
6121 unsigned HOST_WIDE_INT c0, c1;
6123 switch (TREE_CODE (exp))
6126 /* We can find the lowest bit that's a one. If the low
6127 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6128 We need to handle this case since we can find it in a COND_EXPR,
6129 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6130 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6132 if (TREE_CONSTANT_OVERFLOW (exp))
6133 return BIGGEST_ALIGNMENT;
6136 /* Note: tree_low_cst is intentionally not used here,
6137 we don't care about the upper bits. */
6138 c0 = TREE_INT_CST_LOW (exp);
6140 return c0 ? c0 : BIGGEST_ALIGNMENT;
6144 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6145 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6146 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6147 return MIN (c0, c1);
6150 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6151 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6154 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6156 if (integer_pow2p (TREE_OPERAND (exp, 1))
6157 && host_integerp (TREE_OPERAND (exp, 1), 1))
6159 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6160 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6161 return MAX (1, c0 / c1);
6165 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6167 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6170 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6173 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6174 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6175 return MIN (c0, c1);
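/* Illustrative example (added commentary): for the tree i * 12 this
   returns 4, the largest power of two dividing 12, since nothing is
   known about I; for (i * 24) + 8 it returns MIN (8, 8) == 8.  The
   result feeds the alignment of array-reference MEMs.  */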
6184 /* Similar, except that the alignment requirements of TARGET are
6185 taken into account. Assume it is at least as aligned as its
6186 type, unless it is a COMPONENT_REF in which case the layout of
6187 the structure gives the alignment. */
6189 static unsigned HOST_WIDE_INT
6190 highest_pow2_factor_for_target (tree target, tree exp)
6192 unsigned HOST_WIDE_INT target_align, factor;
6194 factor = highest_pow2_factor (exp);
6195 if (TREE_CODE (target) == COMPONENT_REF)
6196 target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
6198 target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
6199 return MAX (factor, target_align);
6202 /* Expands variable VAR. */
6205 expand_var (tree var)
6207 if (DECL_EXTERNAL (var))
6210 if (TREE_STATIC (var))
6211 /* If this is an inlined copy of a static local variable,
6212 look up the original decl. */
6213 var = DECL_ORIGIN (var);
6215 if (TREE_STATIC (var)
6216 ? !TREE_ASM_WRITTEN (var)
6217 : !DECL_RTL_SET_P (var))
6219 if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
6221 /* Prepare a mem & address for the decl. */
6224 if (TREE_STATIC (var))
6227 x = gen_rtx_MEM (DECL_MODE (var),
6228 gen_reg_rtx (Pmode));
6230 set_mem_attributes (x, var, 1);
6231 SET_DECL_RTL (var, x);
6233 else if (lang_hooks.expand_decl (var))
6235 else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
6237 else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
6238 rest_of_decl_compilation (var, NULL, 0, 0);
6239 else if (TREE_CODE (var) == TYPE_DECL
6240 || TREE_CODE (var) == CONST_DECL
6241 || TREE_CODE (var) == FUNCTION_DECL
6242 || TREE_CODE (var) == LABEL_DECL)
6243 /* No expansion needed. */;
6249 /* Expands declarations of variables in list VARS. */
6252 expand_vars (tree vars)
6254 for (; vars; vars = TREE_CHAIN (vars))
6258 if (DECL_EXTERNAL (var))
6262 expand_decl_init (var);
6266 /* Subroutine of expand_expr. Expand the two operands of a binary
6267 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6268 The value may be stored in TARGET if TARGET is nonzero. The
6269 MODIFIER argument is as documented by expand_expr. */
6272 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6273 enum expand_modifier modifier)
6275 if (! safe_from_p (target, exp1, 1))
6277 if (operand_equal_p (exp0, exp1, 0))
6279 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6280 *op1 = copy_rtx (*op0);
6284 /* If we need to preserve evaluation order, copy exp0 into its own
6285 temporary variable so that it can't be clobbered by exp1. */
6286 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6287 exp0 = save_expr (exp0);
6288 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6289 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
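/* Illustrative note (added commentary): with -fevaluation-order in effect
   and EXP1 a call with side effects, e.g. "g () + f ()", the save_expr
   above forces g ()'s value into a stable temporary before f () is
   expanded, so the call cannot clobber it or reorder past it.  */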
6294 /* expand_expr: generate code for computing expression EXP.
6295 An rtx for the computed value is returned. The value is never null.
6296 In the case of a void EXP, const0_rtx is returned.
6298 The value may be stored in TARGET if TARGET is nonzero.
6299 TARGET is just a suggestion; callers must assume that
6300 the rtx returned may not be the same as TARGET.
6302 If TARGET is CONST0_RTX, it means that the value will be ignored.
6304 If TMODE is not VOIDmode, it suggests generating the
6305 result in mode TMODE. But this is done only when convenient.
6306 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6307 TMODE is just a suggestion; callers must assume that
6308 the rtx returned may not have mode TMODE.
6310 Note that TARGET may have neither TMODE nor MODE. In that case, it
6311 probably will not be used.
6313 If MODIFIER is EXPAND_SUM then when EXP is an addition
6314 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6315 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6316 products as above, or REG or MEM, or constant.
6317 Ordinarily in such cases we would output mul or add instructions
6318 and then return a pseudo reg containing the sum.
6320 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6321 it also marks a label as absolutely required (it can't be dead).
6322 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6323 This is used for outputting expressions used in initializers.
6325 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6326 with a constant address even if that address is not normally legitimate.
6327 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6329 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6330 a call parameter. Such targets require special care as we haven't yet
6331 marked TARGET so that it's safe from being trashed by libcalls. We
6332 don't want to use TARGET for anything but the final result;
6333 intermediate values must go elsewhere. Additionally, calls to
6334 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6336 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6337 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6338 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6339 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on recursively. */
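/* Example (illustrative only; modes omitted): expanding the address
   computation "a + i*4" with EXPAND_SUM may legitimately return

       (plus (mult (reg i) (const_int 4)) (symbol_ref "a"))

   rather than a pseudo holding the sum, letting memory_address fold the
   whole expression into an addressing mode.  */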
6342 static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
6343 enum expand_modifier, rtx *);
6346 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6347 enum expand_modifier modifier, rtx *alt_rtl)
6350 rtx ret, last = NULL;
6352 /* Handle ERROR_MARK before anybody tries to access its type. */
6353 if (TREE_CODE (exp) == ERROR_MARK
6354 || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
6356 ret = CONST0_RTX (tmode);
6357 return ret ? ret : const0_rtx;
6360 if (flag_non_call_exceptions)
6362 rn = lookup_stmt_eh_region (exp);
6363 /* If rn < 0, then either (1) tree-ssa is not used or (2) the expression does not throw. */
6365 last = get_last_insn ();
6368 /* If this is an expression of some kind and it has an associated line
6369 number, then emit the line number before expanding the expression.
6371 We need to save and restore the file and line information so that
6372 errors discovered during expansion are emitted with the right
6373 information. It would be better if the diagnostic routines
6374 used the file/line information embedded in the tree nodes rather than globals. */
6376 if (cfun && EXPR_HAS_LOCATION (exp))
6378 location_t saved_location = input_location;
6379 input_location = EXPR_LOCATION (exp);
6380 emit_line_note (input_location);
6382 /* Record where the insns produced belong. */
6383 record_block_change (TREE_BLOCK (exp));
6385 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6387 input_location = saved_location;
6391 ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);
6394 /* If using non-call exceptions, mark all insns that may trap.
6395 expand_call() will mark CALL_INSNs before we get to this code,
6396 but it doesn't handle libcalls, and these may trap. */
6400 for (insn = next_real_insn (last); insn;
6401 insn = next_real_insn (insn))
6403 if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
6404 /* If we want exceptions for non-call insns, any
6405 may_trap_p instruction may throw. */
6406 && GET_CODE (PATTERN (insn)) != CLOBBER
6407 && GET_CODE (PATTERN (insn)) != USE
6408 && (GET_CODE (insn) == CALL_INSN || may_trap_p (PATTERN (insn))))
6410 REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
6420 expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
6421 enum expand_modifier modifier, rtx *alt_rtl)
6424 tree type = TREE_TYPE (exp);
6426 enum machine_mode mode;
6427 enum tree_code code = TREE_CODE (exp);
6429 rtx subtarget, original_target;
6432 bool reduce_bit_field = false;
6433 #define REDUCE_BIT_FIELD(expr) (reduce_bit_field && !ignore \
6434 ? reduce_to_bit_field_precision ((expr), \
6439 mode = TYPE_MODE (type);
6440 unsignedp = TYPE_UNSIGNED (type);
6441 if (lang_hooks.reduce_bit_field_operations
6442 && TREE_CODE (type) == INTEGER_TYPE
6443 && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
6445 /* An operation in what may be a bit-field type needs the
6446 result to be reduced to the precision of the bit-field type,
6447 which is narrower than that of the type's mode. */
6448 reduce_bit_field = true;
6449 if (modifier == EXPAND_STACK_PARM)
6453 /* Use subtarget as the target for operand 0 of a binary operation. */
6454 subtarget = get_subtarget (target);
6455 original_target = target;
6456 ignore = (target == const0_rtx
6457 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6458 || code == CONVERT_EXPR || code == COND_EXPR
6459 || code == VIEW_CONVERT_EXPR)
6460 && TREE_CODE (type) == VOID_TYPE));
6462 /* If we are going to ignore this result, we need only do something
6463 if there is a side-effect somewhere in the expression. If there
6464 is, short-circuit the most common cases here. Note that we must
6465 not call expand_expr with anything but const0_rtx in case this
6466 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6470 if (! TREE_SIDE_EFFECTS (exp))
6473 /* Ensure we reference a volatile object even if value is ignored, but
6474 don't do this if all we are doing is taking its address. */
6475 if (TREE_THIS_VOLATILE (exp)
6476 && TREE_CODE (exp) != FUNCTION_DECL
6477 && mode != VOIDmode && mode != BLKmode
6478 && modifier != EXPAND_CONST_ADDRESS)
6480 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6482 temp = copy_to_reg (temp);
6486 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6487 || code == INDIRECT_REF)
6488 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6491 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6492 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6494 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6495 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6498 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6499 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6500 /* If the second operand has no side effects, just evaluate the first. */
6502 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6504 else if (code == BIT_FIELD_REF)
6506 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6507 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6508 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
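/* Illustrative example (added commentary): for an ignored value such as
   the statement "(void) a[i++];", the branches above expand the operands
   against const0_rtx, so only the side effect -- the increment of i --
   produces insns; the array element itself is never loaded.  */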
6515 /* If we will do cse, generate all results into pseudo registers
6516 since 1) that allows cse to find more things
6517 and 2) otherwise cse could produce an insn the machine
6518 cannot support. An exception is a CONSTRUCTOR into a multi-word
6519 MEM: that's much more likely to be most efficient into the MEM.
6520 Another is a CALL_EXPR which must return in memory. */
6522 if (! cse_not_expected && mode != BLKmode && target
6523 && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
6524 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6525 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6532 tree function = decl_function_context (exp);
6534 temp = label_rtx (exp);
6535 temp = gen_rtx_LABEL_REF (Pmode, temp);
6537 if (function != current_function_decl
6539 LABEL_REF_NONLOCAL_P (temp) = 1;
6541 temp = gen_rtx_MEM (FUNCTION_MODE, temp);
6546 if (!DECL_RTL_SET_P (exp))
6548 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6549 return CONST0_RTX (mode);
6552 /* ... fall through ... */
6555 /* If a static var's type was incomplete when the decl was written,
6556 but the type is complete now, lay out the decl now. */
6557 if (DECL_SIZE (exp) == 0
6558 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6559 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6560 layout_decl (exp, 0);
6562 /* ... fall through ... */
6566 if (DECL_RTL (exp) == 0)
6569 /* Ensure the variable is marked as used even if it doesn't go through
6570 a parser. If it hasn't been used yet, write out an external definition. */
6572 if (! TREE_USED (exp))
6574 assemble_external (exp);
6575 TREE_USED (exp) = 1;
6578 /* Show we haven't gotten RTL for this yet. */
6581 /* Handle variables inherited from containing functions. */
6582 context = decl_function_context (exp);
6584 if (context != 0 && context != current_function_decl
6585 /* If var is static, we don't need a static chain to access it. */
6586 && ! (MEM_P (DECL_RTL (exp))
6587 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6591 /* Mark as non-local and addressable. */
6592 DECL_NONLOCAL (exp) = 1;
6593 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6595 lang_hooks.mark_addressable (exp);
6596 if (!MEM_P (DECL_RTL (exp)))
6598 addr = XEXP (DECL_RTL (exp), 0);
6601 = replace_equiv_address (addr,
6602 fix_lexical_addr (XEXP (addr, 0), exp));
6604 addr = fix_lexical_addr (addr, exp);
6606 temp = replace_equiv_address (DECL_RTL (exp), addr);
6609 /* This is the case of an array whose size is to be determined
6610 from its initializer, while the initializer is still being parsed. See expand_decl. */
6613 else if (MEM_P (DECL_RTL (exp))
6614 && REG_P (XEXP (DECL_RTL (exp), 0)))
6615 temp = validize_mem (DECL_RTL (exp));
6617 /* If DECL_RTL is memory, we are in the normal case; if either
6618 the address is not valid, or it is not a register and -fforce-addr
6619 is specified, get the address into a register. */
6621 else if (MEM_P (DECL_RTL (exp))
6622 && modifier != EXPAND_CONST_ADDRESS
6623 && modifier != EXPAND_SUM
6624 && modifier != EXPAND_INITIALIZER
6625 && (! memory_address_p (DECL_MODE (exp),
6626 XEXP (DECL_RTL (exp), 0))
6628 && !REG_P (XEXP (DECL_RTL (exp), 0)))))
6631 *alt_rtl = DECL_RTL (exp);
6632 temp = replace_equiv_address (DECL_RTL (exp),
6633 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6636 /* If we got something, return it. But first, set the alignment
6637 if the address is a register. */
6640 if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
6641 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6646 /* If the mode of DECL_RTL does not match that of the decl, it
6647 must be a promoted value. We return a SUBREG of the wanted mode,
6648 but mark it so that we know that it was already extended. */
6650 if (REG_P (DECL_RTL (exp))
6651 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6653 /* Get the signedness used for this variable. Ensure we get the
6654 same mode we got when the variable was declared. */
6655 if (GET_MODE (DECL_RTL (exp))
6656 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6657 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6660 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6661 SUBREG_PROMOTED_VAR_P (temp) = 1;
6662 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6666 return DECL_RTL (exp);
6669 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6670 TREE_INT_CST_HIGH (exp), mode);
6672 /* ??? If overflow is set, fold will have done an incomplete job,
6673 which can result in (plus xx (const_int 0)), which can get
6674 simplified by validate_replace_rtx during virtual register
6675 instantiation, which can result in unrecognizable insns.
6676 Avoid this by forcing all overflows into registers. */
6677 if (TREE_CONSTANT_OVERFLOW (exp)
6678 && modifier != EXPAND_INITIALIZER)
6679 temp = force_reg (mode, temp);
6684 return const_vector_from_tree (exp);
6687 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6690 /* If optimized, generate immediate CONST_DOUBLE
6691 which will be turned into memory by reload if necessary.
6693 We used to force a register so that loop.c could see it. But
6694 this does not allow gen_* patterns to perform optimizations with
6695 the constants. It also produces two insns in cases like "x = 1.0;".
6696 On most machines, floating-point constants are not permitted in
6697 many insns, so we'd end up copying it to a register in any case.
6699 Now, we do the copying in expand_binop, if appropriate. */
6700 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6701 TYPE_MODE (TREE_TYPE (exp)));
6704 /* Handle evaluating a complex constant in a CONCAT target. */
6705 if (original_target && GET_CODE (original_target) == CONCAT)
6707 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6710 rtarg = XEXP (original_target, 0);
6711 itarg = XEXP (original_target, 1);
6713 /* Move the real and imaginary parts separately. */
6714 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6715 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6718 emit_move_insn (rtarg, op0);
6720 emit_move_insn (itarg, op1);
6722 return original_target;
6725 /* ... fall through ... */
6728 temp = output_constant_def (exp, 1);
6730 /* temp contains a constant address.
6731 On RISC machines where a constant address isn't valid,
6732 make some insns to get that address into a register. */
6733 if (modifier != EXPAND_CONST_ADDRESS
6734 && modifier != EXPAND_INITIALIZER
6735 && modifier != EXPAND_SUM
6736 && (! memory_address_p (mode, XEXP (temp, 0))
6737 || flag_force_addr))
6738 return replace_equiv_address (temp,
6739 copy_rtx (XEXP (temp, 0)));
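/* Illustrative note (added commentary): output_constant_def hands back a
   MEM addressed by a constant-pool SYMBOL_REF, conceptually

       (mem (symbol_ref ".LC0"))

   and the replace_equiv_address above lets RISC targets, where such an
   address is not directly legitimate, load the pool address into a
   register first.  */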
6744 tree val = TREE_OPERAND (exp, 0);
6745 rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);
6747 if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
6749 /* We can indeed still hit this case, typically via builtin
6750 expanders calling save_expr immediately before expanding
6751 something. Assume this means that we only have to deal
6752 with non-BLKmode values. */
6753 if (GET_MODE (ret) == BLKmode)
6756 val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
6757 DECL_ARTIFICIAL (val) = 1;
6758 TREE_OPERAND (exp, 0) = val;
6760 if (!CONSTANT_P (ret))
6761 ret = copy_to_reg (ret);
6762 SET_DECL_RTL (val, ret);
6771 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6772 TREE_OPERAND (exp, 0)
6773 = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
6778 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6779 expand_goto (TREE_OPERAND (exp, 0));
6781 expand_computed_goto (TREE_OPERAND (exp, 0));
6784 /* These are lowered during gimplification, so we should never ever see them here. */
6790 case LABELED_BLOCK_EXPR:
6791 if (LABELED_BLOCK_BODY (exp))
6792 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6793 /* Should perhaps use expand_label, but this is simpler and safer. */
6794 do_pending_stack_adjust ();
6795 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6798 case EXIT_BLOCK_EXPR:
6799 if (EXIT_BLOCK_RETURN (exp))
6800 sorry ("returned value in block_exit_expr");
6801 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6806 tree block = BIND_EXPR_BLOCK (exp);
6809 /* If we're in functions-as-trees mode, this BIND_EXPR represents
6810 the block, so we need to emit NOTE_INSN_BLOCK_* notes. */
6811 mark_ends = (block != NULL_TREE);
6812 expand_start_bindings_and_block (mark_ends ? 0 : 2, block);
6814 /* If VARS have not yet been expanded, expand them now. */
6815 expand_vars (BIND_EXPR_VARS (exp));
6817 /* TARGET was clobbered early in this function. The correct
6818 indicator of whether or not we need the value of this
6819 expression is the IGNORE variable. */
6820 temp = expand_expr (BIND_EXPR_BODY (exp),
6821 ignore ? const0_rtx : target,
6824 expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);
6830 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6836 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6837 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6842 /* All elts simple constants => refer to a constant in memory. But
6843 if this is a non-BLKmode mode, let it store a field at a time
6844 since that should make a CONST_INT or CONST_DOUBLE when we
6845 fold. Likewise, if we have a target we can use, it is best to
6846 store directly into the target unless the type is large enough
6847 that memcpy will be used. If we are making an initializer and
6848 all operands are constant, put it in memory as well.
6850 FIXME: Avoid trying to fill vector constructors piece-meal.
6851 Output them with output_constant_def below unless we're sure
6852 they're zeros. This should go away when vector initializers
6853 are treated like VECTOR_CST instead of arrays.
6855 else if ((TREE_STATIC (exp)
6856 && ((mode == BLKmode
6857 && ! (target != 0 && safe_from_p (target, exp, 1)))
6858 || TREE_ADDRESSABLE (exp)
6859 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6860 && (! MOVE_BY_PIECES_P
6861 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6863 && ! mostly_zeros_p (exp))))
6864 || ((modifier == EXPAND_INITIALIZER
6865 || modifier == EXPAND_CONST_ADDRESS)
6866 && TREE_CONSTANT (exp)))
6868 rtx constructor = output_constant_def (exp, 1);
6870 if (modifier != EXPAND_CONST_ADDRESS
6871 && modifier != EXPAND_INITIALIZER
6872 && modifier != EXPAND_SUM)
6873 constructor = validize_mem (constructor);
6879 /* Handle calls that pass values in multiple non-contiguous
6880 locations. The Irix 6 ABI has examples of this. */
6881 if (target == 0 || ! safe_from_p (target, exp, 1)
6882 || GET_CODE (target) == PARALLEL
6883 || modifier == EXPAND_STACK_PARM)
6885 = assign_temp (build_qualified_type (type,
6887 | (TREE_READONLY (exp)
6888 * TYPE_QUAL_CONST))),
6889 0, TREE_ADDRESSABLE (exp), 1);
6891 store_constructor (exp, target, 0, int_expr_size (exp));
6897 tree exp1 = TREE_OPERAND (exp, 0);
6899 if (modifier != EXPAND_WRITE)
6903 t = fold_read_from_constant_string (exp);
6905 return expand_expr (t, target, tmode, modifier);
6908 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6909 op0 = memory_address (mode, op0);
6910 temp = gen_rtx_MEM (mode, op0);
6911 set_mem_attributes (temp, exp, 0);
6913 /* If we are writing to this object and its type is a record with
6914 readonly fields, we must mark it as readonly so it will
6915 conflict with readonly references to those fields. */
6916 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6917 RTX_UNCHANGING_P (temp) = 1;
6924 #ifdef ENABLE_CHECKING
6925 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6930 tree array = TREE_OPERAND (exp, 0);
6931 tree low_bound = array_ref_low_bound (exp);
6932 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6935 /* Optimize the special-case of a zero lower bound.
6937 We convert the low_bound to sizetype to avoid some problems
6938 with constant folding. (E.g. suppose the lower bound is 1,
6939 and its mode is QI. Without the conversion, (ARRAY
6940 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6941 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6943 if (! integer_zerop (low_bound))
6944 index = size_diffop (index, convert (sizetype, low_bound));
6946 /* Fold an expression like: "foo"[2].
6947 This is not done in fold so it won't happen inside &.
6948 Don't fold if this is for wide characters since it's too
6949 difficult to do correctly and this is a very rare case. */
6951 if (modifier != EXPAND_CONST_ADDRESS
6952 && modifier != EXPAND_INITIALIZER
6953 && modifier != EXPAND_MEMORY)
6955 tree t = fold_read_from_constant_string (exp);
6958 return expand_expr (t, target, tmode, modifier);
6961 /* If this is a constant index into a constant array,
6962 just get the value from the array. Handle both the cases when
6963 we have an explicit constructor and when our operand is a variable
6964 that was declared const. */
6966 if (modifier != EXPAND_CONST_ADDRESS
6967 && modifier != EXPAND_INITIALIZER
6968 && modifier != EXPAND_MEMORY
6969 && TREE_CODE (array) == CONSTRUCTOR
6970 && ! TREE_SIDE_EFFECTS (array)
6971 && TREE_CODE (index) == INTEGER_CST
6972 && 0 > compare_tree_int (index,
6973 list_length (CONSTRUCTOR_ELTS
6974 (TREE_OPERAND (exp, 0)))))
6978 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6979 i = TREE_INT_CST_LOW (index);
6980 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6984 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6988 else if (optimize >= 1
6989 && modifier != EXPAND_CONST_ADDRESS
6990 && modifier != EXPAND_INITIALIZER
6991 && modifier != EXPAND_MEMORY
6992 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6993 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6994 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6995 && targetm.binds_local_p (array))
6997 if (TREE_CODE (index) == INTEGER_CST)
6999 tree init = DECL_INITIAL (array);
7001 if (TREE_CODE (init) == CONSTRUCTOR)
7005 for (elem = CONSTRUCTOR_ELTS (init);
7007 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7008 elem = TREE_CHAIN (elem))
7011 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7012 return expand_expr (fold (TREE_VALUE (elem)), target,
7015 else if (TREE_CODE (init) == STRING_CST
7016 && 0 > compare_tree_int (index,
7017 TREE_STRING_LENGTH (init)))
7019 tree type = TREE_TYPE (TREE_TYPE (init));
7020 enum machine_mode mode = TYPE_MODE (type);
7022 if (GET_MODE_CLASS (mode) == MODE_INT
7023 && GET_MODE_SIZE (mode) == 1)
7024 return gen_int_mode (TREE_STRING_POINTER (init)
7025 [TREE_INT_CST_LOW (index)], mode);
7030 goto normal_inner_ref;
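/* Worked example (illustrative): given

       static const char msg[] = "foo";

   a read of msg[2] reaches the STRING_CST branch above and is folded at
   expansion time to gen_int_mode ('o', QImode); no memory load is
   emitted.  */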
7033 /* If the operand is a CONSTRUCTOR, we can just extract the
7034 appropriate field if it is present. */
7035 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7039 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7040 elt = TREE_CHAIN (elt))
7041 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7042 /* We can normally use the value of the field in the
7043 CONSTRUCTOR. However, if this is a bitfield in
7044 an integral mode that we can fit in a HOST_WIDE_INT,
7045 we must mask only the number of bits in the bitfield,
7046 since this is done implicitly by the constructor. If
7047 the bitfield does not meet either of those conditions,
7048 we can't do this optimization. */
7049 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7050 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7052 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7053 <= HOST_BITS_PER_WIDE_INT))))
7055 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7056 && modifier == EXPAND_STACK_PARM)
7058 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7059 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7061 HOST_WIDE_INT bitsize
7062 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7063 enum machine_mode imode
7064 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7066 if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7068 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7069 op0 = expand_and (imode, op0, op1, target);
7074 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7077 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7079 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7087 goto normal_inner_ref;
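/* Minimal host-arithmetic sketch of the bitfield masking above
   (illustrative; the helper name is hypothetical, and the real code uses
   the field's machine mode, not HOST_BITS_PER_WIDE_INT).  */
#if 0
static HOST_WIDE_INT
example_mask_bitfield (HOST_WIDE_INT val, int bitsize, int unsignedp)
{
  if (unsignedp)
    /* Unsigned field: AND with a mask of BITSIZE low bits,
       as expand_and does above.  */
    return val & (((HOST_WIDE_INT) 1 << bitsize) - 1);

  /* Signed field: shift left, then arithmetic shift right to sign
     extend, mirroring the two expand_shift calls above.  */
  val <<= HOST_BITS_PER_WIDE_INT - bitsize;
  return val >> (HOST_BITS_PER_WIDE_INT - bitsize);
}
#endif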
7090 case ARRAY_RANGE_REF:
7093 enum machine_mode mode1;
7094 HOST_WIDE_INT bitsize, bitpos;
7097 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7098 &mode1, &unsignedp, &volatilep);
7101 /* If we got back the original object, something is wrong. Perhaps
7102 we are evaluating an expression too early. In any event, don't
7103 infinitely recurse. */
7107 /* If TEM's type is a union of variable size, pass TARGET to the inner
7108 computation, since it will need a temporary and TARGET is known
7109 to be usable for that. This occurs in unchecked conversion in Ada. */
7113 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7114 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7116 && modifier != EXPAND_STACK_PARM
7117 ? target : NULL_RTX),
7119 (modifier == EXPAND_INITIALIZER
7120 || modifier == EXPAND_CONST_ADDRESS
7121 || modifier == EXPAND_STACK_PARM)
7122 ? modifier : EXPAND_NORMAL);
7124 /* If this is a constant, put it into a register if it is a
7125 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7126 if (CONSTANT_P (op0))
7128 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7129 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7131 op0 = force_reg (mode, op0);
7133 op0 = validize_mem (force_const_mem (mode, op0));
7136 /* Otherwise, if this object is not in memory and we either have an
7137 offset or a BLKmode result, put it there. This case can't occur in
7138 C, but can in Ada if we have unchecked conversion of an expression
7139 from a scalar type to an array or record type or for an
7140 ARRAY_RANGE_REF whose type is BLKmode. */
7141 else if (!MEM_P (op0)
7143 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7145 tree nt = build_qualified_type (TREE_TYPE (tem),
7146 (TYPE_QUALS (TREE_TYPE (tem))
7147 | TYPE_QUAL_CONST));
7148 rtx memloc = assign_temp (nt, 1, 1, 1);
7150 emit_move_insn (memloc, op0);
7156 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7162 #ifdef POINTERS_EXTEND_UNSIGNED
7163 if (GET_MODE (offset_rtx) != Pmode)
7164 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7166 if (GET_MODE (offset_rtx) != ptr_mode)
7167 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7170 if (GET_MODE (op0) == BLKmode
7171 /* A constant address in OP0 can have VOIDmode; we must
7172 not try to call force_reg in that case. */
7173 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7175 && (bitpos % bitsize) == 0
7176 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7177 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7179 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7183 op0 = offset_address (op0, offset_rtx,
7184 highest_pow2_factor (offset));
7187 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7188 record its alignment as BIGGEST_ALIGNMENT. */
7189 if (MEM_P (op0) && bitpos == 0 && offset != 0
7190 && is_aligning_offset (offset, tem))
7191 set_mem_align (op0, BIGGEST_ALIGNMENT);
7193 /* Don't forget about volatility even if this is a bitfield. */
7194 if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
7196 if (op0 == orig_op0)
7197 op0 = copy_rtx (op0);
7199 MEM_VOLATILE_P (op0) = 1;
7202 /* The following code doesn't handle CONCAT.
7203 Assume only bitpos == 0 can be used for CONCAT, due to
7204 one-element arrays having the same mode as their element. */
7205 if (GET_CODE (op0) == CONCAT)
7207 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7212 /* In cases where an aligned union has an unaligned object
7213 as a field, we might be extracting a BLKmode value from
7214 an integer-mode (e.g., SImode) object. Handle this case
7215 by doing the extract into an object as wide as the field
7216 (which we know to be the width of a basic mode), then
7217 storing into memory, and changing the mode to BLKmode. */
7218 if (mode1 == VOIDmode
7219 || REG_P (op0) || GET_CODE (op0) == SUBREG
7220 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7221 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7222 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7223 && modifier != EXPAND_CONST_ADDRESS
7224 && modifier != EXPAND_INITIALIZER)
7225 /* If the field isn't aligned enough to fetch as a memref,
7226 fetch it as a bit field. */
7227 || (mode1 != BLKmode
7228 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7229 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7231 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7232 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7233 && ((modifier == EXPAND_CONST_ADDRESS
7234 || modifier == EXPAND_INITIALIZER)
7236 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7237 || (bitpos % BITS_PER_UNIT != 0)))
7238 /* If the type and the field are a constant size and the
7239 size of the type isn't the same size as the bitfield,
7240 we must use bitfield operations. */
7242 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7244 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7247 enum machine_mode ext_mode = mode;
7249 if (ext_mode == BLKmode
7250 && ! (target != 0 && MEM_P (op0)
7252 && bitpos % BITS_PER_UNIT == 0))
7253 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7255 if (ext_mode == BLKmode)
7258 target = assign_temp (type, 0, 1, 1);
7263 /* In this case, BITPOS must start at a byte boundary and
7264 TARGET, if specified, must be a MEM. */
7266 || (target != 0 && !MEM_P (target))
7267 || bitpos % BITS_PER_UNIT != 0)
7270 emit_block_move (target,
7271 adjust_address (op0, VOIDmode,
7272 bitpos / BITS_PER_UNIT),
7273 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7275 (modifier == EXPAND_STACK_PARM
7276 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7281 op0 = validize_mem (op0);
7283 if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
7284 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7286 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7287 (modifier == EXPAND_STACK_PARM
7288 ? NULL_RTX : target),
7290 int_size_in_bytes (TREE_TYPE (tem)));
7292 /* If the result is a record type and BITSIZE is narrower than
7293 the mode of OP0, an integral mode, and this is a big endian
7294 machine, we must put the field into the high-order bits. */
7295 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7296 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7297 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7298 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7299 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7303 /* If the result type is BLKmode, store the data into a temporary
7304 of the appropriate type, but with the mode corresponding to the
7305 mode for the data we have (op0's mode). It's tempting to make
7306 this a constant type, since we know it's only being stored once,
7307 but that can cause problems if we are taking the address of this
7308 COMPONENT_REF because the MEM of any reference via that address
7309 will have flags corresponding to the type, which will not
7310 necessarily be constant. */
7311 if (mode == BLKmode)
7314 = assign_stack_temp_for_type
7315 (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);
7317 emit_move_insn (new, op0);
7318 op0 = copy_rtx (new);
7319 PUT_MODE (op0, BLKmode);
7320 set_mem_attributes (op0, exp, 1);
7326 /* If the result is BLKmode, use that to access the object now as well. */
7328 if (mode == BLKmode)
7331 /* Get a reference to just this component. */
7332 if (modifier == EXPAND_CONST_ADDRESS
7333 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7334 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7336 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7338 if (op0 == orig_op0)
7339 op0 = copy_rtx (op0);
7341 set_mem_attributes (op0, exp, 0);
7342 if (REG_P (XEXP (op0, 0)))
7343 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7345 MEM_VOLATILE_P (op0) |= volatilep;
7346 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7347 || modifier == EXPAND_CONST_ADDRESS
7348 || modifier == EXPAND_INITIALIZER)
7350 else if (target == 0)
7351 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7353 convert_move (target, op0, unsignedp);
7358 return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);
7361 /* Check for a built-in function. */
7362 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7363 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7365 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7367 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7368 == BUILT_IN_FRONTEND)
7369 return lang_hooks.expand_expr (exp, original_target,
7373 return expand_builtin (exp, target, subtarget, tmode, ignore);
7376 return expand_call (exp, target, ignore);
7378 case NON_LVALUE_EXPR:
7381 if (TREE_OPERAND (exp, 0) == error_mark_node)
7384 if (TREE_CODE (type) == UNION_TYPE)
7386 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7388 /* If both input and output are BLKmode, this conversion isn't doing
7389 anything except possibly changing memory attributes. */
7390 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7392 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7395 result = copy_rtx (result);
7396 set_mem_attributes (result, exp, 0);
7402 if (TYPE_MODE (type) != BLKmode)
7403 target = gen_reg_rtx (TYPE_MODE (type));
7405 target = assign_temp (type, 0, 1, 1);
7409 /* Store data into beginning of memory target. */
7410 store_expr (TREE_OPERAND (exp, 0),
7411 adjust_address (target, TYPE_MODE (valtype), 0),
7412 modifier == EXPAND_STACK_PARM ? 2 : 0);
7414 else if (REG_P (target))
7415 /* Store this field into a union of the proper type. */
7416 store_field (target,
7417 MIN ((int_size_in_bytes (TREE_TYPE
7418 (TREE_OPERAND (exp, 0)))
7420 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7421 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7422 VOIDmode, 0, type, 0);
7426 /* Return the entire union. */
7430 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7432 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7435 /* If the signedness of the conversion differs and OP0 is
7436 a promoted SUBREG, clear that indication since we now
7437 have to do the proper extension. */
7438 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7439 && GET_CODE (op0) == SUBREG)
7440 SUBREG_PROMOTED_VAR_P (op0) = 0;
7442 return REDUCE_BIT_FIELD (op0);
7445 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7446 op0 = REDUCE_BIT_FIELD (op0);
7447 if (GET_MODE (op0) == mode)
7450 /* If OP0 is a constant, just convert it into the proper mode. */
7451 if (CONSTANT_P (op0))
7453 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7454 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7456 if (modifier == EXPAND_INITIALIZER)
7457 return simplify_gen_subreg (mode, op0, inner_mode,
7458 subreg_lowpart_offset (mode,
7461 return convert_modes (mode, inner_mode, op0,
7462 TYPE_UNSIGNED (inner_type));
7465 if (modifier == EXPAND_INITIALIZER)
7466 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7470 convert_to_mode (mode, op0,
7471 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7473 convert_move (target, op0,
7474 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7477 case VIEW_CONVERT_EXPR:
7478 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7480 /* If the input and output modes are both the same, we are done.
7481 Otherwise, if neither mode is BLKmode and both are integral and within
7482 a word, we can use gen_lowpart. If neither is true, make sure the
7483 operand is in memory and convert the MEM to the new mode. */
7484 if (TYPE_MODE (type) == GET_MODE (op0))
7486 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7487 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7488 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7489 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7490 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7491 op0 = gen_lowpart (TYPE_MODE (type), op0);
7492 else if (!MEM_P (op0))
7494 /* If the operand is not a MEM, force it into memory. Since we
7495 are going to be changing the mode of the MEM, don't call
7496 force_const_mem for constants because we don't allow pool
7497 constants to change mode. */
7498 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7500 if (TREE_ADDRESSABLE (exp))
7503 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7505 = assign_stack_temp_for_type
7506 (TYPE_MODE (inner_type),
7507 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7509 emit_move_insn (target, op0);
7513 /* At this point, OP0 is in the correct mode. If the output type is such
7514 that the operand is known to be aligned, indicate that it is.
7515 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7519 op0 = copy_rtx (op0);
7521 if (TYPE_ALIGN_OK (type))
7522 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7523 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7524 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7526 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7527 HOST_WIDE_INT temp_size
7528 = MAX (int_size_in_bytes (inner_type),
7529 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7530 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7531 temp_size, 0, type);
7532 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7534 if (TREE_ADDRESSABLE (exp))
7537 if (GET_MODE (op0) == BLKmode)
7538 emit_block_move (new_with_op0_mode, op0,
7539 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7540 (modifier == EXPAND_STACK_PARM
7541 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7543 emit_move_insn (new_with_op0_mode, op0);
7548 op0 = adjust_address (op0, TYPE_MODE (type), 0);
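/* Illustrative example (added commentary): VIEW_CONVERT_EXPR reinterprets
   bits without converting the value, much like

       union { float f; int i; } u;
       u.f = x;
       use (u.i);

   When the modes differ and neither gen_lowpart nor an existing MEM
   applies, the code above spills the operand to a stack temporary and
   re-reads it in the new mode.  */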
7554 this_optab = ! unsignedp && flag_trapv
7555 && (GET_MODE_CLASS (mode) == MODE_INT)
7556 ? addv_optab : add_optab;
7558 /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
7559 something else, make sure we add the register to the constant and
7560 then to the other thing. This case can occur during strength
7561 reduction and doing it this way will produce better code if the
7562 frame pointer or argument pointer is eliminated.
7564 fold-const.c will ensure that the constant is always in the inner
7565 PLUS_EXPR, so the only case we need to do anything about is if
7566 sp, ap, or fp is our second argument, in which case we must swap
7567 the innermost first argument and our second argument. */
7569 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7570 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7571 && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
7572 && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7573 || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7574 || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7576 tree t = TREE_OPERAND (exp, 1);
7578 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7579 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7582 /* If the result is to be ptr_mode and we are adding an integer to
7583 something, we might be forming a constant. So try to use
7584 plus_constant. If it produces a sum and we can't accept it,
7585 use force_operand. This allows P = &ARR[const] to generate
7586 efficient code on machines where a SYMBOL_REF is not a valid address.
7589 If this is an EXPAND_SUM call, always return the sum. */
7590 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7591 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7593 if (modifier == EXPAND_STACK_PARM)
7595 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7596 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7597 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7601 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7603 /* Use immed_double_const to ensure that the constant is
7604 truncated according to the mode of OP1, then sign extended
7605 to a HOST_WIDE_INT. Using the constant directly can result
7606 in non-canonical RTL in a 64x32 cross compile. */
7608 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7610 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7611 op1 = plus_constant (op1, INTVAL (constant_part));
7612 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7613 op1 = force_operand (op1, target);
7614 return REDUCE_BIT_FIELD (op1);
7617 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7618 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7619 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7623 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7624 (modifier == EXPAND_INITIALIZER
7625 ? EXPAND_INITIALIZER : EXPAND_SUM));
7626 if (! CONSTANT_P (op0))
7628 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7629 VOIDmode, modifier);
7630 /* Return a PLUS if modifier says it's OK. */
7631 if (modifier == EXPAND_SUM
7632 || modifier == EXPAND_INITIALIZER)
7633 return simplify_gen_binary (PLUS, mode, op0, op1);
7636 /* Use immed_double_const to ensure that the constant is
7637 truncated according to the mode of OP1, then sign extended
7638 to a HOST_WIDE_INT. Using the constant directly can result
7639 in non-canonical RTL in a 64x32 cross compile. */
7641 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7643 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7644 op0 = plus_constant (op0, INTVAL (constant_part));
7645 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7646 op0 = force_operand (op0, target);
7647 return REDUCE_BIT_FIELD (op0);
7651 /* No sense saving up arithmetic to be done
7652 if it's all in the wrong mode to form part of an address.
7653 And force_operand won't know whether to sign-extend or zero-extend. */
7655 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7656 || mode != ptr_mode)
7658 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7659 subtarget, &op0, &op1, 0);
7660 if (op0 == const0_rtx)
7662 if (op1 == const0_rtx)
7667 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7668 subtarget, &op0, &op1, modifier);
7669 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7672 /* For initializers, we are allowed to return a MINUS of two
7673 symbolic constants. Here we handle all cases when both operands are constant. */
7675 /* Handle difference of two symbolic constants,
7676 for the sake of an initializer. */
7677 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7678 && really_constant_p (TREE_OPERAND (exp, 0))
7679 && really_constant_p (TREE_OPERAND (exp, 1)))
7681 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7682 NULL_RTX, &op0, &op1, modifier);
7684 /* If the last operand is a CONST_INT, use plus_constant of
7685 the negated constant. Else make the MINUS. */
7686 if (GET_CODE (op1) == CONST_INT)
7687 return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
7689 return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
7692 this_optab = ! unsignedp && flag_trapv
7693 && (GET_MODE_CLASS(mode) == MODE_INT)
7694 ? subv_optab : sub_optab;
7696 /* No sense saving up arithmetic to be done
7697 if it's all in the wrong mode to form part of an address.
7698 And force_operand won't know whether to sign-extend or zero-extend. */
7700 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7701 || mode != ptr_mode)
7704 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7705 subtarget, &op0, &op1, modifier);
7707 /* Convert A - const to A + (-const). */
7708 if (GET_CODE (op1) == CONST_INT)
7710 op1 = negate_rtx (mode, op1);
7711 return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
7717 /* If first operand is constant, swap them.
7718 Thus the following special case checks need only
7719 check the second operand. */
7720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7722 tree t1 = TREE_OPERAND (exp, 0);
7723 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7724 TREE_OPERAND (exp, 1) = t1;
7727 /* Attempt to return something suitable for generating an
7728 indexed address, for machines that support that. */
7730 if (modifier == EXPAND_SUM && mode == ptr_mode
7731 && host_integerp (TREE_OPERAND (exp, 1), 0))
7733 tree exp1 = TREE_OPERAND (exp, 1);
7735 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7739 op0 = force_operand (op0, NULL_RTX);
7741 op0 = copy_to_mode_reg (mode, op0);
7743 return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
7744 gen_int_mode (tree_low_cst (exp1, 0),
7745 TYPE_MODE (TREE_TYPE (exp1)))));
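/* Example (illustrative only; modes omitted): under EXPAND_SUM the
   multiplication "i * 8" from an address computation may come back as

       (mult (reg i) (const_int 8))

   which the caller can fold into a scaled-index addressing mode instead
   of emitting a separate multiply insn.  */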
7748 if (modifier == EXPAND_STACK_PARM)
7751 /* Check for multiplying things that have been extended
7752 from a narrower type. If this machine supports multiplying
7753 in that narrower type with a result in the desired type,
7754 do it that way, and avoid the explicit type-conversion. */
7755 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7756 && TREE_CODE (type) == INTEGER_TYPE
7757 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7758 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7759 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7760 && int_fits_type_p (TREE_OPERAND (exp, 1),
7761 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7762 /* Don't use a widening multiply if a shift will do. */
7763 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7764 > HOST_BITS_PER_WIDE_INT)
7765 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7767 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7768 && (TYPE_PRECISION (TREE_TYPE
7769 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7770 == TYPE_PRECISION (TREE_TYPE
7772 (TREE_OPERAND (exp, 0), 0))))
7773 /* If both operands are extended, they must either both
7774 be zero-extended or both be sign-extended. */
7775 && (TYPE_UNSIGNED (TREE_TYPE
7776 (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7777 == TYPE_UNSIGNED (TREE_TYPE
7779 (TREE_OPERAND (exp, 0), 0)))))))
7781 tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
7782 enum machine_mode innermode = TYPE_MODE (op0type);
7783 bool zextend_p = TYPE_UNSIGNED (op0type);
7784 optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
7785 this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;
7787 if (mode == GET_MODE_WIDER_MODE (innermode))
7789 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7791 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7792 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7793 TREE_OPERAND (exp, 1),
7794 NULL_RTX, &op0, &op1, 0);
7796 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7797 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7798 NULL_RTX, &op0, &op1, 0);
7801 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7802 && innermode == word_mode)
7805 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7806 NULL_RTX, VOIDmode, 0);
7807 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7808 op1 = convert_modes (innermode, mode,
7809 expand_expr (TREE_OPERAND (exp, 1),
7810 NULL_RTX, VOIDmode, 0),
7813 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7814 NULL_RTX, VOIDmode, 0);
7815 temp = expand_binop (mode, other_optab, op0, op1, target,
7816 unsignedp, OPTAB_LIB_WIDEN);
7817 hipart = gen_highpart (innermode, temp);
7818 htem = expand_mult_highpart_adjust (innermode, hipart,
7822 emit_move_insn (hipart, htem);
7823 return REDUCE_BIT_FIELD (temp);
7827 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7828 subtarget, &op0, &op1, 0);
7829 return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
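/* Worked example (illustrative): for "(int) s1 * (int) s2" with HImode
   shorts s1 and s2, a target providing an smul_widen_optab pattern from
   HImode to SImode takes the first branch above and emits one widening
   multiply, instead of two sign extensions followed by a full SImode
   multiply.  */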
7831 case TRUNC_DIV_EXPR:
7832 case FLOOR_DIV_EXPR:
7834 case ROUND_DIV_EXPR:
7835 case EXACT_DIV_EXPR:
7836 if (modifier == EXPAND_STACK_PARM)
7838 /* Possible optimization: compute the dividend with EXPAND_SUM
7839 then, if the divisor is constant, optimize the case
7840 where some terms of the dividend have coefficients divisible by it. */
7841 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7842 subtarget, &op0, &op1, 0);
7843 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7846 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving
7847 an expensive divide. If not, combine will rebuild the original computation. */
7849 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7850 && TREE_CODE (type) == REAL_TYPE
7851 && !real_onep (TREE_OPERAND (exp, 0)))
7852 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7853 build (RDIV_EXPR, type,
7854 build_real (type, dconst1),
7855 TREE_OPERAND (exp, 1))),
7856 target, tmode, modifier);
7857 this_optab = sdiv_optab;
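/* Illustrative example (added commentary): with
   -funsafe-math-optimizations, "x / y" is rewritten here as
   "x * (1.0 / y)"; if y is loop-invariant, later CSE can hoist the single
   reciprocal so each iteration pays only a multiply.  */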
7860 case TRUNC_MOD_EXPR:
7861 case FLOOR_MOD_EXPR:
7863 case ROUND_MOD_EXPR:
7864 if (modifier == EXPAND_STACK_PARM)
7866 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7867 subtarget, &op0, &op1, 0);
7868 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7870 case FIX_ROUND_EXPR:
7871 case FIX_FLOOR_EXPR:
7873 abort (); /* Not used for C. */
7875 case FIX_TRUNC_EXPR:
7876 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7877 if (target == 0 || modifier == EXPAND_STACK_PARM)
7878 target = gen_reg_rtx (mode);
7879 expand_fix (target, op0, unsignedp);
7883 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7884 if (target == 0 || modifier == EXPAND_STACK_PARM)
7885 target = gen_reg_rtx (mode);
7886 /* expand_float can't figure out what to do if FROM has VOIDmode.
7887 So give it the correct mode. With -O, cse will optimize this. */
7888 if (GET_MODE (op0) == VOIDmode)
7889 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7891 expand_float (target, op0,
7892 TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7896 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7897 if (modifier == EXPAND_STACK_PARM)
7899 temp = expand_unop (mode,
7900 ! unsignedp && flag_trapv
7901 && (GET_MODE_CLASS(mode) == MODE_INT)
7902 ? negv_optab : neg_optab, op0, target, 0);
7905 return REDUCE_BIT_FIELD (temp);
7908 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7909 if (modifier == EXPAND_STACK_PARM)
7912 /* ABS_EXPR is not valid for complex arguments. */
7913 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7914 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7917 /* Unsigned abs is simply the operand. Testing here means we don't
7918 risk generating incorrect code below. */
7919 if (TYPE_UNSIGNED (type))
7922 return expand_abs (mode, op0, target, unsignedp,
7923 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7927 target = original_target;
7929 || modifier == EXPAND_STACK_PARM
7930 || (MEM_P (target) && MEM_VOLATILE_P (target))
7931 || GET_MODE (target) != mode
7933 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7934 target = gen_reg_rtx (mode);
7935 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7936 target, &op0, &op1, 0);
7938 /* First try to do it with a special MIN or MAX instruction.
7940 If that does not win, use a conditional jump to select the proper value. */
7941 this_optab = (unsignedp
7942 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7943 : (code == MIN_EXPR ? smin_optab : smax_optab));
7945 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7950 /* At this point, a MEM target is no longer useful; we will get better code without it. */
7954 target = gen_reg_rtx (mode);
7956 /* If op1 was placed in target, swap op0 and op1. */
7957 if (target != op0 && target == op1)
7965 emit_move_insn (target, op0);
7967 op0 = gen_label_rtx ();
7969 /* If this mode is an integer too wide to compare properly,
7970 compare word by word. Rely on cse to optimize constant cases. */
7971 if (GET_MODE_CLASS (mode) == MODE_INT
7972 && ! can_compare_p (GE, mode, ccp_jump))
7974 if (code == MAX_EXPR)
7975 do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
7978 do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
7983 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7984 unsignedp, mode, NULL_RTX, NULL_RTX, op0);
7986 emit_move_insn (target, op1);
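/* The fallback above amounts to this pseudo-sequence for MAX_EXPR
   (illustrative):

       target = op0;
       if (target >= op1) goto done;
       target = op1;
     done:

   using word-by-word comparison jumps when the mode is too wide to
   compare directly.  */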
7991 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7992 if (modifier == EXPAND_STACK_PARM)
7994 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7999 /* ??? Can optimize bitwise operations with one arg constant.
8000 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8001 and (a bitwise1 b) bitwise2 b (etc)
8002 but that is probably not worthwhile. */
8004 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8005 boolean values when we want in all cases to compute both of them. In
8006 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8007 as actual zero-or-1 values and then bitwise anding. In cases where
8008 there cannot be any side effects, better code would be made by
8009 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8010 how to recognize those cases. */
8012 case TRUTH_AND_EXPR:
8014 this_optab = and_optab;
8019 this_optab = ior_optab;
8022 case TRUTH_XOR_EXPR:
8024 this_optab = xor_optab;
8031 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8033 if (modifier == EXPAND_STACK_PARM)
8035 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8036 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8039 /* Could determine the answer when only additive constants differ. Also,
8040 the addition of one can be handled by changing the condition. */
8047 case UNORDERED_EXPR:
8055 temp = do_store_flag (exp,
8056 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8057 tmode != VOIDmode ? tmode : mode, 0);
8061 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8062 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8064 && REG_P (original_target)
8065 && (GET_MODE (original_target)
8066 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8068 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8071 /* If temp is constant, we can just compute the result. */
8072 if (GET_CODE (temp) == CONST_INT)
8074 if (INTVAL (temp) != 0)
8075 emit_move_insn (target, const1_rtx);
8077 emit_move_insn (target, const0_rtx);
8082 if (temp != original_target)
8084 enum machine_mode mode1 = GET_MODE (temp);
8085 if (mode1 == VOIDmode)
8086 mode1 = tmode != VOIDmode ? tmode : mode;
8088 temp = copy_to_mode_reg (mode1, temp);
8091 op1 = gen_label_rtx ();
8092 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8093 GET_MODE (temp), unsignedp, op1);
8094 emit_move_insn (temp, const1_rtx);
8099 /* If no set-flag instruction, must generate a conditional
8100 store into a temporary variable. Drop through
8101 and handle this like && and ||. */
8103 case TRUTH_ANDIF_EXPR:
8104 case TRUTH_ORIF_EXPR:
8107 || modifier == EXPAND_STACK_PARM
8108 || ! safe_from_p (target, exp, 1)
8109 /* Make sure we don't have a hard reg (such as function's return
8110 value) live across basic blocks, if not optimizing. */
8111 || (!optimize && REG_P (target)
8112 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8113 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8116 emit_clr_insn (target);
8118 op1 = gen_label_rtx ();
8119 jumpifnot (exp, op1);
8122 emit_0_to_1_insn (target);
8125 return ignore ? const0_rtx : target;
8127 case TRUTH_NOT_EXPR:
8128 if (modifier == EXPAND_STACK_PARM)
8130 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8131 /* The parser is careful to generate TRUTH_NOT_EXPR
8132 only with operands that are always zero or one. */
8133 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8134 target, 1, OPTAB_LIB_WIDEN);
8135 if (temp == 0)
8136 abort ();
8137 return temp;
8139 case COMPOUND_EXPR:
8140 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8141 emit_queue ();
8142 return expand_expr_real (TREE_OPERAND (exp, 1),
8143 (ignore ? const0_rtx : target),
8144 VOIDmode, modifier, alt_rtl);
8146 case STATEMENT_LIST:
8148 tree_stmt_iterator iter;
8150 if (!ignore)
8151 abort ();
8153 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8154 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8156 return const0_rtx;
8158 case COND_EXPR:
8159 /* If it's void, we don't need to worry about computing a value.  */
8160 if (VOID_TYPE_P (TREE_TYPE (exp)))
8162 tree pred = TREE_OPERAND (exp, 0);
8163 tree then_ = TREE_OPERAND (exp, 1);
8164 tree else_ = TREE_OPERAND (exp, 2);
8166 if (TREE_CODE (then_) == GOTO_EXPR
8167 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8169 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8170 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8172 else if (TREE_CODE (else_) == GOTO_EXPR
8173 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8175 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8176 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8179 /* Just use the 'if' machinery. */
8180 expand_start_cond (pred, 0);
8181 expand_expr (then_, const0_rtx, VOIDmode, 0);
8185 /* Iterate over 'else if's instead of recursing. */
8186 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8188 expand_start_else ();
8189 if (EXPR_HAS_LOCATION (exp))
8191 emit_line_note (EXPR_LOCATION (exp));
8192 record_block_change (TREE_BLOCK (exp));
8194 expand_elseif (TREE_OPERAND (exp, 0));
8195 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8197 /* Don't emit the jump and label if there's no 'else' clause. */
8198 if (TREE_SIDE_EFFECTS (exp))
8200 expand_start_else ();
8201 expand_expr (exp, const0_rtx, VOIDmode, 0);
8203 expand_end_cond ();
8204 return const0_rtx;
8207 /* If we would have a "singleton" (see below) were it not for a
8208 conversion in each arm, bring that conversion back out. */
8209 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8210 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8211 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8212 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8214 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8215 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8217 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8218 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8219 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8220 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8221 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8222 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8223 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8224 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8225 return expand_expr (build1 (NOP_EXPR, type,
8226 build (COND_EXPR, TREE_TYPE (iftrue),
8227 TREE_OPERAND (exp, 0),
8228 iftrue, iffalse)),
8229 target, tmode, modifier);
8233 /* Note that COND_EXPRs whose type is a structure or union
8234 are required to be constructed to contain assignments of
8235 a temporary variable, so that we can evaluate them here
8236 for side effect only. If type is void, we must do likewise. */
8238 /* If an arm of the branch requires a cleanup,
8239 only that cleanup is performed. */
8242 tree binary_op = 0, unary_op = 0;
8244 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8245 convert it to our mode, if necessary. */
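/* For illustration, r = (x < y) ? 1 : 0 needs no branches at all:
   the comparison X < Y already yields the desired 0/1 value, so it
   is expanded directly and only widened to MODE if needed.  */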
8246 if (integer_onep (TREE_OPERAND (exp, 1))
8247 && integer_zerop (TREE_OPERAND (exp, 2))
8248 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8250 if (ignore)
8252 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8253 modifier);
8254 return const0_rtx;
8257 if (modifier == EXPAND_STACK_PARM)
8258 target = 0;
8259 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8260 if (GET_MODE (op0) == mode)
8261 return op0;
8263 if (target == 0)
8264 target = gen_reg_rtx (mode);
8265 convert_move (target, op0, unsignedp);
8266 return target;
8269 /* Check for X ? A + B : A. If we have this, we can copy A to the
8270 output and conditionally add B. Similarly for unary operations.
8271 Don't do this if X has side-effects because those side effects
8272 might affect A or B and the "?" operation is a sequence point in
8273 ANSI. (operand_equal_p tests for side effects.) */
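/* For illustration, r = x ? a + b : a is emitted, in effect, as

     r = a;
     if (x)
       r = r + b;

   copying A unconditionally and making only the addition of B
   conditional.  */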
8275 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8276 && operand_equal_p (TREE_OPERAND (exp, 2),
8277 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8278 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8279 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8280 && operand_equal_p (TREE_OPERAND (exp, 1),
8281 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8282 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8283 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8284 && operand_equal_p (TREE_OPERAND (exp, 2),
8285 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8286 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8287 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8288 && operand_equal_p (TREE_OPERAND (exp, 1),
8289 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8290 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8292 /* If we are not to produce a result, we have no target. Otherwise,
8293 if a target was specified use it; it will not be used as an
8294 intermediate target unless it is safe.  If no target, use a
8295 temporary.  */
8297 if (ignore)
8298 temp = 0;
8299 else if (modifier == EXPAND_STACK_PARM)
8300 temp = assign_temp (type, 0, 0, 1);
8301 else if (original_target
8302 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8303 || (singleton && REG_P (original_target)
8304 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8305 && original_target == var_rtx (singleton)))
8306 && GET_MODE (original_target) == mode
8307 #ifdef HAVE_conditional_move
8308 && (! can_conditionally_move_p (mode)
8309 || REG_P (original_target)
8310 || TREE_ADDRESSABLE (type))
8311 #endif
8312 && (!MEM_P (original_target)
8313 || TREE_ADDRESSABLE (type)))
8314 temp = original_target;
8315 else if (TREE_ADDRESSABLE (type))
8316 abort ();
8317 else
8318 temp = assign_temp (type, 0, 0, 1);
8320 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8321 do the test of X as a store-flag operation, do this as
8322 A + ((X != 0) << log C). Similarly for other simple binary
8323 operators. Only do for C == 1 if BRANCH_COST is low. */
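/* Worked example: for r = x ? a + 4 : a, with X a comparison,
   C == 4 is a power of 2 and log C == 2, so the branch-free form is

     r = a + ((x != 0) << 2);

   do_store_flag materializes X != 0 as a 0/1 value, the shift turns
   it into 0 or 4, and the add then yields A or A + 4.  */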
8324 if (temp && singleton && binary_op
8325 && (TREE_CODE (binary_op) == PLUS_EXPR
8326 || TREE_CODE (binary_op) == MINUS_EXPR
8327 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8328 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8329 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8330 : integer_onep (TREE_OPERAND (binary_op, 1)))
8331 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8335 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8336 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8337 ? addv_optab : add_optab)
8338 : TREE_CODE (binary_op) == MINUS_EXPR
8339 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8340 ? subv_optab : sub_optab)
8341 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8342 : xor_optab);
8344 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8345 if (singleton == TREE_OPERAND (exp, 1))
8346 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8347 else
8348 cond = TREE_OPERAND (exp, 0);
8350 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8351 ? temp : NULL_RTX),
8352 mode, BRANCH_COST <= 1);
8354 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8355 result = expand_shift (LSHIFT_EXPR, mode, result,
8356 build_int_2 (tree_log2
8357 (TREE_OPERAND (binary_op, 1)),
8358 0),
8360 (safe_from_p (temp, singleton, 1)
8361 ? temp : NULL_RTX), 0);
8363 if (result)
8365 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8366 return expand_binop (mode, boptab, op1, result, temp,
8367 unsignedp, OPTAB_LIB_WIDEN);
8371 do_pending_stack_adjust ();
8373 op0 = gen_label_rtx ();
8375 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8377 if (temp != 0)
8379 /* If the target conflicts with the other operand of the
8380 binary op, we can't use it. Also, we can't use the target
8381 if it is a hard register, because evaluating the condition
8382 might clobber it. */
8383 if ((binary_op
8384 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8385 || (REG_P (temp)
8386 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8387 temp = gen_reg_rtx (mode);
8388 store_expr (singleton, temp,
8389 modifier == EXPAND_STACK_PARM ? 2 : 0);
8391 else
8392 expand_expr (singleton,
8393 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8394 if (singleton == TREE_OPERAND (exp, 1))
8395 jumpif (TREE_OPERAND (exp, 0), op0);
8396 else
8397 jumpifnot (TREE_OPERAND (exp, 0), op0);
8399 if (binary_op && temp == 0)
8400 /* Just touch the other operand. */
8401 expand_expr (TREE_OPERAND (binary_op, 1),
8402 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8403 else if (binary_op)
8404 store_expr (build (TREE_CODE (binary_op), type,
8405 make_tree (type, temp),
8406 TREE_OPERAND (binary_op, 1)),
8407 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8408 else
8409 store_expr (build1 (TREE_CODE (unary_op), type,
8410 make_tree (type, temp)),
8411 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8412 op1 = op0;
8414 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8415 comparison operator. If we have one of these cases, set the
8416 output to A, branch on A (cse will merge these two references),
8417 then set the output to FOO. */
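/* For illustration, r = (a != 0) ? a : foo () is emitted as

     r = a;
     if (a != 0)
       goto done;
     r = foo ();
   done:

   so A is referenced twice in a row and cse can merge the two.  */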
8418 else if (temp
8419 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8420 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8421 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8422 TREE_OPERAND (exp, 1), 0)
8423 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8424 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8425 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8427 if (REG_P (temp)
8428 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8429 temp = gen_reg_rtx (mode);
8430 store_expr (TREE_OPERAND (exp, 1), temp,
8431 modifier == EXPAND_STACK_PARM ? 2 : 0);
8432 jumpif (TREE_OPERAND (exp, 0), op0);
8434 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8435 store_expr (TREE_OPERAND (exp, 2), temp,
8436 modifier == EXPAND_STACK_PARM ? 2 : 0);
8437 else
8438 expand_expr (TREE_OPERAND (exp, 2),
8439 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8442 else if (temp
8443 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8444 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8445 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8446 TREE_OPERAND (exp, 2), 0)
8447 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8448 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8449 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8451 if (REG_P (temp)
8452 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8453 temp = gen_reg_rtx (mode);
8454 store_expr (TREE_OPERAND (exp, 2), temp,
8455 modifier == EXPAND_STACK_PARM ? 2 : 0);
8456 jumpifnot (TREE_OPERAND (exp, 0), op0);
8458 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8459 store_expr (TREE_OPERAND (exp, 1), temp,
8460 modifier == EXPAND_STACK_PARM ? 2 : 0);
8461 else
8462 expand_expr (TREE_OPERAND (exp, 1),
8463 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8466 else
8468 op1 = gen_label_rtx ();
8469 jumpifnot (TREE_OPERAND (exp, 0), op0);
8471 /* One branch of the cond can be void, if it never returns.  For
8472 example, A ? throw : E.  */
8473 if (temp != 0
8474 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8475 store_expr (TREE_OPERAND (exp, 1), temp,
8476 modifier == EXPAND_STACK_PARM ? 2 : 0);
8477 else
8478 expand_expr (TREE_OPERAND (exp, 1),
8479 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8481 emit_jump_insn (gen_jump (op1));
8483 emit_label (op0);
8484 if (temp != 0
8485 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8486 store_expr (TREE_OPERAND (exp, 2), temp,
8487 modifier == EXPAND_STACK_PARM ? 2 : 0);
8488 else
8489 expand_expr (TREE_OPERAND (exp, 2),
8490 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8494 emit_label (op1);
8496 return temp;
8500 case INIT_EXPR:
8502 tree lhs = TREE_OPERAND (exp, 0);
8503 tree rhs = TREE_OPERAND (exp, 1);
8505 temp = expand_assignment (lhs, rhs, ! ignore);
8506 return temp;
8510 case MODIFY_EXPR:
8511 /* If lhs is complex, expand calls in rhs before computing it.
8512 That's so we don't compute a pointer and save it over a
8513 call. If lhs is simple, compute it first so we can give it
8514 as a target if the rhs is just a call. This avoids an
8515 extra temp and copy and that prevents a partial-subsumption
8516 which makes bad code. Actually we could treat
8517 component_ref's of vars like vars. */
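/* For illustration: in p->f = g (), expanding the call G first means
   it cannot clobber a register holding the address &p->f; in v = g (),
   expanding the simple lhs V first lets the call's value be stored
   into V directly, with no intermediate temporary.  */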
8519 tree lhs = TREE_OPERAND (exp, 0);
8520 tree rhs = TREE_OPERAND (exp, 1);
8524 /* Check for |= or &= of a bitfield of size one into another bitfield
8525 of size 1. In this case, (unless we need the result of the
8526 assignment) we can do this more efficiently with a
8527 test followed by an assignment, if necessary.
8529 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8530 things change so we do, this code should be enhanced to
8531 support it.  */
8532 if (ignore
8533 && TREE_CODE (lhs) == COMPONENT_REF
8534 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8535 || TREE_CODE (rhs) == BIT_AND_EXPR)
8536 && TREE_OPERAND (rhs, 0) == lhs
8537 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8538 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8539 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
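/* For illustration, with two adjacent one-bit fields

     struct { unsigned a : 1, b : 1; } s;
     s.a |= s.b;

   the code below emits, in effect,

     if (s.b)
       s.a = 1;

   (and for &=, if (!s.b) s.a = 0;), avoiding a read-modify-write
   of the destination bitfield.  */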
8541 rtx label = gen_label_rtx ();
8543 do_jump (TREE_OPERAND (rhs, 1),
8544 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8545 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8546 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8547 (TREE_CODE (rhs) == BIT_IOR_EXPR
8548 ? integer_one_node
8549 : integer_zero_node)),
8550 0);
8551 do_pending_stack_adjust ();
8552 emit_label (label);
8553 return const0_rtx;
8556 temp = expand_assignment (lhs, rhs, ! ignore);
8557 return temp;
8561 case RETURN_EXPR:
8562 if (!TREE_OPERAND (exp, 0))
8563 expand_null_return ();
8564 else
8565 expand_return (TREE_OPERAND (exp, 0));
8566 return const0_rtx;
8568 case PREINCREMENT_EXPR:
8569 case PREDECREMENT_EXPR:
8570 return REDUCE_BIT_FIELD (expand_increment (exp, 0, ignore));
8572 case POSTINCREMENT_EXPR:
8573 case POSTDECREMENT_EXPR:
8574 /* Faster to treat as pre-increment if result is not used. */
8575 return REDUCE_BIT_FIELD (expand_increment (exp, ! ignore, ignore));
8577 case ADDR_EXPR:
8578 if (modifier == EXPAND_STACK_PARM)
8579 target = 0;
8580 /* If we are taking the address of something erroneous, just
8581 return a zero.  */
8582 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8583 return const0_rtx;
8584 /* If we are taking the address of a constant and are at the
8585 top level, we have to use output_constant_def since we can't
8586 call force_const_mem at top level.  */
8587 else if (cfun == 0
8588 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8589 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8590 == 'c')))
8591 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8592 else
8594 /* We make sure to pass const0_rtx down if we came in with
8595 ignore set, to avoid doing the cleanups twice for something. */
8596 op0 = expand_expr (TREE_OPERAND (exp, 0),
8597 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8598 (modifier == EXPAND_INITIALIZER
8599 ? modifier : EXPAND_CONST_ADDRESS));
8601 /* If we are going to ignore the result, OP0 will have been set
8602 to const0_rtx, so just return it. Don't get confused and
8603 think we are taking the address of the constant.  */
8604 if (ignore)
8605 return op0;
8607 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8608 clever and returns a REG when given a MEM. */
8609 op0 = protect_from_queue (op0, 1);
8611 /* We would like the object in memory. If it is a constant, we can
8612 have it be statically allocated into memory. For a non-constant,
8613 we need to allocate some memory and store the value into it. */
8615 if (CONSTANT_P (op0))
8616 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8617 op0);
8618 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8619 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8620 || GET_CODE (op0) == LO_SUM)
8622 /* If this object is in a register, it can't be BLKmode. */
8623 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8624 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8626 if (GET_CODE (op0) == PARALLEL)
8627 /* Handle calls that pass values in multiple
8628 non-contiguous locations. The Irix 6 ABI has examples
8630 emit_group_store (memloc, op0, inner_type,
8631 int_size_in_bytes (inner_type));
8632 else
8633 emit_move_insn (memloc, op0);
8634 op0 = memloc;
8637 if (!MEM_P (op0))
8638 abort ();
8641 mark_temp_addr_taken (op0);
8642 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8644 op0 = XEXP (op0, 0);
8645 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8646 op0 = convert_memory_address (ptr_mode, op0);
8647 return op0;
8650 /* If OP0 is not aligned as least as much as the type requires, we
8651 need to make a temporary, copy OP0 to it, and take the address of
8652 the temporary. We want to use the alignment of the type, not of
8653 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8654 the test for BLKmode means that can't happen. The test for
8655 BLKmode is because we never make mis-aligned MEMs with
8656 non-BLKmode.
8658 We don't need to do this at all if the machine doesn't have
8659 strict alignment. */
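/* For illustration, on a strict-alignment target, taking the address
   of a byte-aligned BLKmode object whose type wants word alignment is
   handled, in effect, by copying the object into a suitably aligned
   stack temporary (emit_block_move below) and returning the address
   of the temporary, so the pointer always satisfies TYPE_ALIGN.  */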
8660 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8661 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8662 > MEM_ALIGN (op0))
8663 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8665 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8668 if (TYPE_ALIGN_OK (inner_type))
8669 abort ();
8671 if (TREE_ADDRESSABLE (inner_type))
8673 /* We can't make a bitwise copy of this object, so fail. */
8674 error ("cannot take the address of an unaligned member");
8678 new = assign_stack_temp_for_type
8679 (TYPE_MODE (inner_type),
8680 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8681 : int_size_in_bytes (inner_type),
8682 1, build_qualified_type (inner_type,
8683 (TYPE_QUALS (inner_type)
8684 | TYPE_QUAL_CONST)));
8686 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8687 (modifier == EXPAND_STACK_PARM
8688 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8689 op0 = new;
8693 op0 = force_operand (XEXP (op0, 0), target);
8696 if (flag_force_addr
8697 && !REG_P (op0)
8698 && modifier != EXPAND_CONST_ADDRESS
8699 && modifier != EXPAND_INITIALIZER
8700 && modifier != EXPAND_SUM)
8701 op0 = force_reg (Pmode, op0);
8703 if (REG_P (op0)
8704 && ! REG_USERVAR_P (op0))
8705 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8707 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8708 op0 = convert_memory_address (ptr_mode, op0);
8712 case ENTRY_VALUE_EXPR:
8713 abort ();
8715 /* COMPLEX type for Extended Pascal & Fortran  */
8716 case COMPLEX_EXPR:
8718 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8719 rtx insns;
8721 /* Get the rtx code of the operands. */
8722 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8723 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8725 if (! target)
8726 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8728 start_sequence ();
8730 /* Move the real (op0) and imaginary (op1) parts to their location.  */
8731 emit_move_insn (gen_realpart (mode, target), op0);
8732 emit_move_insn (gen_imagpart (mode, target), op1);
8734 insns = get_insns ();
8735 end_sequence ();
8737 /* Complex construction should appear as a single unit. */
8738 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8739 each with a separate pseudo as destination.
8740 It's not correct for flow to treat them as a unit. */
8741 if (GET_CODE (target) != CONCAT)
8742 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8743 else
8744 emit_insn (insns);
8746 return target;
8749 case REALPART_EXPR:
8750 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8751 return gen_realpart (mode, op0);
8753 case IMAGPART_EXPR:
8754 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8755 return gen_imagpart (mode, op0);
8757 case CONJ_EXPR:
8759 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8760 rtx imag_t;
8761 rtx insns;
8763 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8765 if (! target)
8766 target = gen_reg_rtx (mode);
8768 start_sequence ();
8770 /* Store the realpart and the negated imagpart to target.  */
8771 emit_move_insn (gen_realpart (partmode, target),
8772 gen_realpart (partmode, op0));
8774 imag_t = gen_imagpart (partmode, target);
8775 temp = expand_unop (partmode,
8776 ! unsignedp && flag_trapv
8777 && (GET_MODE_CLASS(partmode) == MODE_INT)
8778 ? negv_optab : neg_optab,
8779 gen_imagpart (partmode, op0), imag_t, 0);
8780 if (temp != imag_t)
8781 emit_move_insn (imag_t, temp);
8783 insns = get_insns ();
8784 end_sequence ();
8786 /* Conjugate should appear as a single unit
8787 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8788 each with a separate pseudo as destination.
8789 It's not correct for flow to treat them as a unit. */
8790 if (GET_CODE (target) != CONCAT)
8791 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8792 else
8793 emit_insn (insns);
8795 return target;
8798 case RESX_EXPR:
8799 expand_resx_expr (exp);
8800 return const0_rtx;
8802 case TRY_CATCH_EXPR:
8803 case CATCH_EXPR:
8804 case EH_FILTER_EXPR:
8805 case TRY_FINALLY_EXPR:
8806 /* Lowered by tree-eh.c.  */
8807 abort ();
8809 case WITH_CLEANUP_EXPR:
8810 case CLEANUP_POINT_EXPR:
8812 /* Lowered by gimplify.c.  */
8813 abort ();
8815 case VA_ARG_EXPR:
8816 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8818 case EXC_PTR_EXPR:
8819 return get_exception_pointer (cfun);
8821 case FILTER_EXPR:
8822 return get_exception_filter (cfun);
8824 case FDESC_EXPR:
8825 /* Function descriptors are not valid except for as
8826 initialization constants, and should not be expanded.  */
8827 abort ();
8829 case SWITCH_EXPR:
8830 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
8831 "switch statement");
8832 if (SWITCH_BODY (exp))
8833 expand_expr_stmt (SWITCH_BODY (exp));
8834 if (SWITCH_LABELS (exp))
8836 tree duplicate = 0;
8837 tree vec = SWITCH_LABELS (exp);
8838 size_t i, n = TREE_VEC_LENGTH (vec);
8840 for (i = 0; i < n; ++i)
8842 tree elt = TREE_VEC_ELT (vec, i);
8843 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
8844 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
8845 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
8847 tree case_low = CASE_LOW (elt);
8848 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
8849 if (case_low && case_high)
8851 /* Case label is less than minimum for type. */
8852 if (TREE_CODE (min_value) == INTEGER_CST
8853 && tree_int_cst_compare (case_low, min_value) < 0
8854 && tree_int_cst_compare (case_high, min_value) < 0)
8856 warning ("case label value %d is less than minimum value for type",
8857 TREE_INT_CST (case_low));
8861 /* Case value is greater than maximum for type. */
8862 if (TREE_CODE (max_value) == INTEGER_CST
8863 && tree_int_cst_compare (case_low, max_value) > 0
8864 && tree_int_cst_compare (case_high, max_value) > 0)
8866 warning ("case label value %d exceeds maximum value for type",
8867 TREE_INT_CST (case_high));
8871 /* Saturate lower case label value to minimum. */
8872 if (TREE_CODE (min_value) == INTEGER_CST
8873 && tree_int_cst_compare (case_high, min_value) >= 0
8874 && tree_int_cst_compare (case_low, min_value) < 0)
8876 warning ("lower value %d in case label range less than minimum value for type",
8877 TREE_INT_CST (case_low));
8878 case_low = min_value;
8881 /* Saturate upper case label value to maximum. */
8882 if (TREE_CODE (max_value) == INTEGER_CST
8883 && tree_int_cst_compare (case_low, max_value) <= 0
8884 && tree_int_cst_compare (case_high, max_value) > 0)
8886 warning ("upper value %d in case label range exceeds maximum value for type",
8887 TREE_INT_CST (case_high));
8888 case_high = max_value;
8892 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
8897 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
8898 return const0_rtx;
8900 case LABEL_EXPR:
8901 expand_label (TREE_OPERAND (exp, 0));
8902 return const0_rtx;
8904 case CASE_LABEL_EXPR:
8906 tree duplicate = 0;
8907 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
8908 &duplicate, false);
8909 if (duplicate)
8910 abort ();
8911 return const0_rtx;
8914 case ASM_EXPR:
8915 expand_asm_expr (exp);
8916 return const0_rtx;
8918 default:
8919 return lang_hooks.expand_expr (exp, original_target, tmode,
8920 modifier, alt_rtl);
8923 /* Here to do an ordinary binary operator, generating an instruction
8924 from the optab already placed in `this_optab'. */
8925 binop:
8926 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8927 subtarget, &op0, &op1, 0);
8928 binop2:
8929 if (modifier == EXPAND_STACK_PARM)
8930 target = 0;
8931 temp = expand_binop (mode, this_optab, op0, op1, target,
8932 unsignedp, OPTAB_LIB_WIDEN);
8933 if (temp == 0)
8934 abort ();
8935 return REDUCE_BIT_FIELD (temp);
8937 #undef REDUCE_BIT_FIELD
8939 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8940 signedness of TYPE), possibly returning the result in TARGET. */
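/* Worked example: for TYPE_PRECISION (type) == 3 in a 32-bit mode,
   an unsigned EXP is reduced by masking,

     exp & 7              -- mask == (1 << 3) - 1

   while a signed EXP is reduced by a shift pair that replicates the
   field's sign bit,

     (exp << 29) >> 29    -- arithmetic right shift

   both of which leave only the low 3 bits plus a correct extension.  */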
8941 static rtx
8942 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8944 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8945 if (target && GET_MODE (target) != GET_MODE (exp))
8946 target = 0;
8947 if (TYPE_UNSIGNED (type))
8949 rtx mask;
8950 if (prec < HOST_BITS_PER_WIDE_INT)
8951 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8952 GET_MODE (exp));
8953 else
8954 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8955 ((unsigned HOST_WIDE_INT) 1
8956 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8957 GET_MODE (exp));
8958 return expand_and (GET_MODE (exp), exp, mask, target);
8960 else
8962 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8963 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8964 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8968 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8969 when applied to the address of EXP produces an address known to be
8970 aligned more than BIGGEST_ALIGNMENT. */
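/* The pattern recognized here is the round-up-to-a-boundary offset,
   roughly

     offset = (-(intptr_t) &exp) & (align - 1)

   with align a power of 2 larger than BIGGEST_ALIGNMENT: adding that
   offset to &exp yields the next align-aligned address, so the sum
   is more strictly aligned than BIGGEST_ALIGNMENT.  */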
8972 static int
8973 is_aligning_offset (tree offset, tree exp)
8975 /* Strip off any conversions. */
8976 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8977 || TREE_CODE (offset) == NOP_EXPR
8978 || TREE_CODE (offset) == CONVERT_EXPR)
8979 offset = TREE_OPERAND (offset, 0);
8981 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8982 power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8983 if (TREE_CODE (offset) != BIT_AND_EXPR
8984 || !host_integerp (TREE_OPERAND (offset, 1), 1)
8985 || compare_tree_int (TREE_OPERAND (offset, 1),
8986 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
8987 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
8988 return 0;
8990 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
8991 It must be NEGATE_EXPR. Then strip any more conversions. */
8992 offset = TREE_OPERAND (offset, 0);
8993 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8994 || TREE_CODE (offset) == NOP_EXPR
8995 || TREE_CODE (offset) == CONVERT_EXPR)
8996 offset = TREE_OPERAND (offset, 0);
8998 if (TREE_CODE (offset) != NEGATE_EXPR)
8999 return 0;
9001 offset = TREE_OPERAND (offset, 0);
9002 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9003 || TREE_CODE (offset) == NOP_EXPR
9004 || TREE_CODE (offset) == CONVERT_EXPR)
9005 offset = TREE_OPERAND (offset, 0);
9007 /* This must now be the address of EXP. */
9008 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
9011 /* Return the tree node if an ARG corresponds to a string constant or zero
9012 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9013 in bytes within the string that ARG is accessing. The type of the
9014 offset will be `sizetype'. */
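/* For illustration:

     "hello"        returns the STRING_CST, *PTR_OFFSET = 0
     &"hello"[2]    the ARRAY_REF form, *PTR_OFFSET = 2
     "hello" + i    the PLUS_EXPR form, *PTR_OFFSET = i

   so callers (e.g. the string built-in expanders) can read the
   constant bytes directly at the given offset.  */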
9016 tree
9017 string_constant (tree arg, tree *ptr_offset)
9021 if (TREE_CODE (arg) == ADDR_EXPR
9022 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9024 *ptr_offset = size_zero_node;
9025 return TREE_OPERAND (arg, 0);
9027 if (TREE_CODE (arg) == ADDR_EXPR
9028 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9029 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9031 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9032 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9034 else if (TREE_CODE (arg) == PLUS_EXPR)
9036 tree arg0 = TREE_OPERAND (arg, 0);
9037 tree arg1 = TREE_OPERAND (arg, 1);
9039 STRIP_NOPS (arg0);
9040 STRIP_NOPS (arg1);
9042 if (TREE_CODE (arg0) == ADDR_EXPR
9043 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9045 *ptr_offset = convert (sizetype, arg1);
9046 return TREE_OPERAND (arg0, 0);
9048 else if (TREE_CODE (arg1) == ADDR_EXPR
9049 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9051 *ptr_offset = convert (sizetype, arg0);
9052 return TREE_OPERAND (arg1, 0);
9056 return 0;
9059 /* Expand code for a post- or pre- increment or decrement
9060 and return the RTX for the result.
9061 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9063 static rtx
9064 expand_increment (tree exp, int post, int ignore)
9068 tree incremented = TREE_OPERAND (exp, 0);
9069 optab this_optab = add_optab;
9071 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9072 int op0_is_copy = 0;
9073 int single_insn = 0;
9074 /* 1 means we can't store into OP0 directly,
9075 because it is a subreg narrower than a word,
9076 and we don't dare clobber the rest of the word.  */
9077 int bad_subreg = 0;
9079 /* Stabilize any component ref that might need to be
9080 evaluated more than once below. */
9081 if (!post
9082 || TREE_CODE (incremented) == BIT_FIELD_REF
9083 || (TREE_CODE (incremented) == COMPONENT_REF
9084 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9085 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9086 incremented = stabilize_reference (incremented);
9087 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9088 ones into save exprs so that they don't accidentally get evaluated
9089 more than once by the code below. */
9090 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9091 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9092 incremented = save_expr (incremented);
9094 /* Compute the operands as RTX.
9095 Note whether OP0 is the actual lvalue or a copy of it:
9096 I believe it is a copy iff it is a register or subreg
9097 and insns were generated in computing it. */
9099 temp = get_last_insn ();
9100 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9102 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9103 in place but instead must do sign- or zero-extension during assignment,
9104 so we copy it into a new register and let the code below use it as
9107 Note that we can safely modify this SUBREG since it is known not to be
9108 shared (it was made by the expand_expr call above). */
9110 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9113 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9117 else if (GET_CODE (op0) == SUBREG
9118 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9120 /* We cannot increment this SUBREG in place. If we are
9121 post-incrementing, get a copy of the old value. Otherwise,
9122 just mark that we cannot increment in place. */
9123 if (post)
9124 op0 = copy_to_reg (op0);
9125 else
9126 bad_subreg = 1;
9129 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
9130 && temp != get_last_insn ());
9131 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9133 /* Decide whether incrementing or decrementing. */
9134 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9135 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9136 this_optab = sub_optab;
9138 /* Convert decrement by a constant into a negative increment. */
9139 if (this_optab == sub_optab
9140 && GET_CODE (op1) == CONST_INT)
9142 op1 = GEN_INT (-INTVAL (op1));
9143 this_optab = add_optab;
9146 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9147 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9149 /* For a preincrement, see if we can do this with a single instruction. */
9150 if (!post)
9152 icode = (int) this_optab->handlers[(int) mode].insn_code;
9153 if (icode != (int) CODE_FOR_nothing
9154 /* Make sure that OP0 is valid for operands 0 and 1
9155 of the insn we want to queue. */
9156 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9157 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9158 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9159 single_insn = 1;
9162 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9163 then we cannot just increment OP0. We must therefore contrive to
9164 increment the original value. Then, for postincrement, we can return
9165 OP0 since it is a copy of the old value. For preincrement, expand here
9166 unless we can do it with a single insn.
9168 Likewise if storing directly into OP0 would clobber high bits
9169 we need to preserve (bad_subreg). */
9170 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9172 /* This is the easiest way to increment the value wherever it is.
9173 Problems with multiple evaluation of INCREMENTED are prevented
9174 because either (1) it is a component_ref or preincrement,
9175 in which case it was stabilized above, or (2) it is an array_ref
9176 with constant index in an array in a register, which is
9177 safe to reevaluate. */
9178 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9179 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9180 ? MINUS_EXPR : PLUS_EXPR),
9181 TREE_TYPE (exp), incremented,
9183 TREE_OPERAND (exp, 1));
9185 while (TREE_CODE (incremented) == NOP_EXPR
9186 || TREE_CODE (incremented) == CONVERT_EXPR)
9188 newexp = convert (TREE_TYPE (incremented), newexp);
9189 incremented = TREE_OPERAND (incremented, 0);
9192 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9193 return post ? op0 : temp;
9198 /* We have a true reference to the value in OP0.
9199 If there is an insn to add or subtract in this mode, queue it.
9200 Queuing the increment insn avoids the register shuffling
9201 that often results if we must increment now and first save
9202 the old value for subsequent use. */
9204 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9205 op0 = stabilize (op0);
9206 #endif
9208 icode = (int) this_optab->handlers[(int) mode].insn_code;
9209 if (icode != (int) CODE_FOR_nothing
9210 /* Make sure that OP0 is valid for operands 0 and 1
9211 of the insn we want to queue. */
9212 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9213 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9215 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9216 op1 = force_reg (mode, op1);
9218 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9220 if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
9222 rtx addr = (general_operand (XEXP (op0, 0), mode)
9223 ? force_reg (Pmode, XEXP (op0, 0))
9224 : copy_to_reg (XEXP (op0, 0)));
9227 op0 = replace_equiv_address (op0, addr);
9228 temp = force_reg (GET_MODE (op0), op0);
9229 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9230 op1 = force_reg (mode, op1);
9232 /* The increment queue is LIFO, thus we have to `queue'
9233 the instructions in reverse order. */
9234 enqueue_insn (op0, gen_move_insn (op0, temp));
9235 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9240 /* Preincrement, or we can't increment with one simple insn. */
9241 if (post)
9242 /* Save a copy of the value before inc or dec, to return it later.  */
9243 temp = value = copy_to_reg (op0);
9244 else
9245 /* Arrange to return the incremented value.  */
9246 /* Copy the rtx because expand_binop will protect from the queue,
9247 and the results of that would be invalid for us to return
9248 if our caller does emit_queue before using our result. */
9249 temp = copy_rtx (value = op0);
9251 /* Increment however we can. */
9252 op1 = expand_binop (mode, this_optab, value, op1, op0,
9253 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9255 /* Make sure the value is stored into OP0. */
9256 if (op1 != op0)
9257 emit_move_insn (op0, op1);
9259 return temp;
9262 /* Generate code to calculate EXP using a store-flag instruction
9263 and return an rtx for the result. EXP is either a comparison
9264 or a TRUTH_NOT_EXPR whose operand is a comparison.
9266 If TARGET is nonzero, store the result there if convenient.
9268 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9269 cheap.
9271 Return zero if there is no suitable set-flag instruction
9272 available on this machine.
9274 Once expand_expr has been called on the arguments of the comparison,
9275 we are committed to doing the store flag, since it is not safe to
9276 re-evaluate the expression. We emit the store-flag insn by calling
9277 emit_store_flag, but only expand the arguments if we have a reason
9278 to believe that emit_store_flag will be successful. If we think that
9279 it will, but it isn't, we have to simulate the store-flag with a
9280 set/jump/set sequence. */
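/* For illustration, r = (x < y) on a machine with an slt-style
   instruction becomes a single emit_store_flag.  The fallback coded
   at the bottom of this function emits, in effect,

     r = 1;
     if (x < y)
       goto done;
     r = 0;
   done:

   (with the two constants swapped when INVERT is set).  */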
9282 static rtx
9283 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9286 tree arg0, arg1, type;
9287 tree tem;
9288 enum machine_mode operand_mode;
9289 int invert = 0;
9290 int unsignedp;
9291 rtx op0, op1;
9292 enum insn_code icode;
9293 rtx subtarget = target;
9294 rtx result, label;
9296 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9297 result at the end. We can't simply invert the test since it would
9298 have already been inverted if it were valid. This case occurs for
9299 some floating-point comparisons. */
9301 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9302 invert = 1, exp = TREE_OPERAND (exp, 0);
9304 arg0 = TREE_OPERAND (exp, 0);
9305 arg1 = TREE_OPERAND (exp, 1);
9307 /* Don't crash if the comparison was erroneous. */
9308 if (arg0 == error_mark_node || arg1 == error_mark_node)
9311 type = TREE_TYPE (arg0);
9312 operand_mode = TYPE_MODE (type);
9313 unsignedp = TYPE_UNSIGNED (type);
9315 /* We won't bother with BLKmode store-flag operations because it would mean
9316 passing a lot of information to emit_store_flag. */
9317 if (operand_mode == BLKmode)
9320 /* We won't bother with store-flag operations involving function pointers
9321 when function pointers must be canonicalized before comparisons. */
9322 #ifdef HAVE_canonicalize_funcptr_for_compare
9323 if (HAVE_canonicalize_funcptr_for_compare
9324 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9325 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9327 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9328 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9329 == FUNCTION_TYPE))))
9330 return 0;
9331 #endif
9336 /* Get the rtx comparison code to use. We know that EXP is a comparison
9337 operation of some type. Some comparisons against 1 and -1 can be
9338 converted to comparisons with zero. Do so here so that the tests
9339 below will be aware that we have a comparison with zero. These
9340 tests will not catch constants in the first operand, but constants
9341 are rarely passed as the first operand. */
9343 switch (TREE_CODE (exp))
9345 case EQ_EXPR:
9346 code = EQ;
9347 break;
9348 case NE_EXPR:
9349 code = NE;
9350 break;
9351 case LT_EXPR:
9352 if (integer_onep (arg1))
9353 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9354 else
9355 code = unsignedp ? LTU : LT;
9356 break;
9357 case LE_EXPR:
9358 if (! unsignedp && integer_all_onesp (arg1))
9359 arg1 = integer_zero_node, code = LT;
9360 else
9361 code = unsignedp ? LEU : LE;
9362 break;
9363 case GT_EXPR:
9364 if (! unsignedp && integer_all_onesp (arg1))
9365 arg1 = integer_zero_node, code = GE;
9366 else
9367 code = unsignedp ? GTU : GT;
9368 break;
9369 case GE_EXPR:
9370 if (integer_onep (arg1))
9371 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9372 else
9373 code = unsignedp ? GEU : GE;
9374 break;
9376 case UNORDERED_EXPR:
9377 code = UNORDERED;
9378 break;
9379 case ORDERED_EXPR:
9380 code = ORDERED;
9381 break;
9382 case UNLT_EXPR:
9383 code = UNLT;
9384 break;
9385 case UNLE_EXPR:
9386 code = UNLE;
9387 break;
9388 case UNGT_EXPR:
9389 code = UNGT;
9390 break;
9391 case UNGE_EXPR:
9392 code = UNGE;
9393 break;
9394 case UNEQ_EXPR:
9395 code = UNEQ;
9396 break;
9397 case LTGT_EXPR:
9398 code = LTGT;
9399 break;
9400 default:
9401 abort ();
9405 /* Put a constant second. */
9406 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9408 tem = arg0; arg0 = arg1; arg1 = tem;
9409 code = swap_condition (code);
9412 /* If this is an equality or inequality test of a single bit, we can
9413 do this by shifting the bit being tested to the low-order bit and
9414 masking the result with the constant 1. If the condition was EQ,
9415 we xor it with 1. This does not require an scc insn and is faster
9416 than an scc insn even if we have it.
9418 The code to make this transformation was moved into fold_single_bit_test,
9419 so we just call into the folder and expand its result. */
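/* For illustration, (x & 8) != 0 becomes (x >> 3) & 1, and
   (x & 8) == 0 becomes ((x >> 3) & 1) ^ 1, so no scc instruction
   or branch is needed for single-bit tests.  */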
9421 if ((code == NE || code == EQ)
9422 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9423 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9425 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9426 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9428 target, VOIDmode, EXPAND_NORMAL);
9431 /* Now see if we are likely to be able to do this. Return if not. */
9432 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9435 icode = setcc_gen_code[(int) code];
9436 if (icode == CODE_FOR_nothing
9437 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9439 /* We can only do this if it is one of the special cases that
9440 can be handled without an scc insn. */
9441 if ((code == LT && integer_zerop (arg1))
9442 || (! only_cheap && code == GE && integer_zerop (arg1)))
9443 ;
9444 else if (BRANCH_COST >= 0
9445 && ! only_cheap && (code == NE || code == EQ)
9446 && TREE_CODE (type) != REAL_TYPE
9447 && ((abs_optab->handlers[(int) operand_mode].insn_code
9448 != CODE_FOR_nothing)
9449 || (ffs_optab->handlers[(int) operand_mode].insn_code
9450 != CODE_FOR_nothing)))
9451 ;
9452 else
9453 return 0;
9456 if (! get_subtarget (target)
9457 || GET_MODE (subtarget) != operand_mode)
9458 subtarget = 0;
9460 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9462 if (target == 0)
9463 target = gen_reg_rtx (mode);
9465 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9466 because, if the emit_store_flag does anything it will succeed and
9467 OP0 and OP1 will not be used subsequently. */
9469 result = emit_store_flag (target, code,
9470 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9471 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9472 operand_mode, unsignedp, 1);
9474 if (result)
9476 if (invert)
9477 result = expand_binop (mode, xor_optab, result, const1_rtx,
9478 result, 0, OPTAB_LIB_WIDEN);
9480 return result;
9482 /* If this failed, we have to do this with set/compare/jump/set code. */
9483 if (!REG_P (target)
9484 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9485 target = gen_reg_rtx (GET_MODE (target));
9487 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9488 result = compare_from_rtx (op0, op1, code, unsignedp,
9489 operand_mode, NULL_RTX);
9490 if (GET_CODE (result) == CONST_INT)
9491 return (((result == const0_rtx && ! invert)
9492 || (result != const0_rtx && invert))
9493 ? const0_rtx : const1_rtx);
9495 /* The code of RESULT may not match CODE if compare_from_rtx
9496 decided to swap its operands and reverse the original code.
9498 We know that compare_from_rtx returns either a CONST_INT or
9499 a new comparison code, so it is safe to just extract the
9500 code from RESULT. */
9501 code = GET_CODE (result);
9503 label = gen_label_rtx ();
9504 if (bcc_gen_fctn[(int) code] == 0)
9505 abort ();
9507 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9508 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9509 emit_label (label);
9511 return target;
9515 /* Stubs in case we haven't got a casesi insn. */
9516 #ifndef HAVE_casesi
9517 # define HAVE_casesi 0
9518 # define gen_casesi(a, b, c, d, e) (0)
9519 # define CODE_FOR_casesi CODE_FOR_nothing
9520 #endif
9522 /* If the machine does not have a case insn that compares the bounds,
9523 this means extra overhead for dispatch tables, which raises the
9524 threshold for using them. */
9525 #ifndef CASE_VALUES_THRESHOLD
9526 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9527 #endif /* CASE_VALUES_THRESHOLD */
9529 unsigned int
9530 case_values_threshold (void)
9532 return CASE_VALUES_THRESHOLD;
9535 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9536 0 otherwise (i.e. if there is no casesi instruction). */
9537 int
9538 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9539 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9541 enum machine_mode index_mode = SImode;
9542 int index_bits = GET_MODE_BITSIZE (index_mode);
9543 rtx op1, op2, index;
9544 enum machine_mode op_mode;
9546 if (! HAVE_casesi)
9547 return 0;
9549 /* Convert the index to SImode. */
9550 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9552 enum machine_mode omode = TYPE_MODE (index_type);
9553 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9555 /* We must handle the endpoints in the original mode. */
9556 index_expr = build (MINUS_EXPR, index_type,
9557 index_expr, minval);
9558 minval = integer_zero_node;
9559 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9560 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9561 omode, 1, default_label);
9562 /* Now we can safely truncate. */
9563 index = convert_to_mode (index_mode, index, 0);
9565 else
9567 if (TYPE_MODE (index_type) != index_mode)
9569 index_expr = convert (lang_hooks.types.type_for_size
9570 (index_bits, 0), index_expr);
9571 index_type = TREE_TYPE (index_expr);
9574 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9577 index = protect_from_queue (index, 0);
9578 do_pending_stack_adjust ();
9580 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9581 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9582 (index, op_mode))
9583 index = copy_to_mode_reg (op_mode, index);
9585 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9587 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9588 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9589 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9590 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9591 (op1, op_mode))
9592 op1 = copy_to_mode_reg (op_mode, op1);
9594 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9596 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9597 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9598 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9599 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9600 (op2, op_mode))
9601 op2 = copy_to_mode_reg (op_mode, op2);
9603 emit_jump_insn (gen_casesi (index, op1, op2,
9604 table_label, default_label));
9605 return 1;
9608 /* Attempt to generate a tablejump instruction; same concept. */
9609 #ifndef HAVE_tablejump
9610 #define HAVE_tablejump 0
9611 #define gen_tablejump(x, y) (0)
9612 #endif
9614 /* Subroutine of the next function.
9616 INDEX is the value being switched on, with the lowest value
9617 in the table already subtracted.
9618 MODE is its expected mode (needed if INDEX is constant).
9619 RANGE is the length of the jump table.
9620 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9622 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9623 index value is out of range. */
9625 static void
9626 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9627 rtx default_label)
9629 rtx temp, vector;
9631 if (INTVAL (range) > cfun->max_jumptable_ents)
9632 cfun->max_jumptable_ents = INTVAL (range);
9634 /* Do an unsigned comparison (in the proper mode) between the index
9635 expression and the value which represents the length of the range.
9636 Since we just finished subtracting the lower bound of the range
9637 from the index expression, this comparison allows us to simultaneously
9638 check that the original index expression value is both greater than
9639 or equal to the minimum value of the range and less than or equal to
9640 the maximum value of the range. */
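/* Concretely: for case values 3 .. 7, the lower bound 3 has already
   been subtracted and RANGE is 4.  An in-range index 5 gives 5 - 3 ==
   2 <= 4; an out-of-range index 2 gives (unsigned) (2 - 3), a huge
   unsigned value, so the single GTU test also catches values below
   the minimum.  */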
9642 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9643 default_label);
9645 /* If index is in range, it must fit in Pmode.
9646 Convert to Pmode so we can index with it. */
9647 if (mode != Pmode)
9648 index = convert_to_mode (Pmode, index, 1);
9650 /* Don't let a MEM slip through, because then INDEX that comes
9651 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9652 and break_out_memory_refs will go to work on it and mess it up. */
9653 #ifdef PIC_CASE_VECTOR_ADDRESS
9654 if (flag_pic && !REG_P (index))
9655 index = copy_to_mode_reg (Pmode, index);
9656 #endif
9658 /* If flag_force_addr were to affect this address
9659 it could interfere with the tricky assumptions made
9660 about addresses that contain label-refs,
9661 which may be valid only very near the tablejump itself. */
9662 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9663 GET_MODE_SIZE, because this indicates how large insns are. The other
9664 uses should all be Pmode, because they are addresses. This code
9665 could fail if addresses and insns are not the same size. */
9666 index = gen_rtx_PLUS (Pmode,
9667 gen_rtx_MULT (Pmode, index,
9668 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9669 gen_rtx_LABEL_REF (Pmode, table_label));
9670 #ifdef PIC_CASE_VECTOR_ADDRESS
9671 if (flag_pic)
9672 index = PIC_CASE_VECTOR_ADDRESS (index);
9673 else
9674 #endif
9675 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9676 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9677 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9678 RTX_UNCHANGING_P (vector) = 1;
9679 MEM_NOTRAP_P (vector) = 1;
9680 convert_move (temp, vector, 0);
9682 emit_jump_insn (gen_tablejump (temp, table_label));
9684 /* If we are generating PIC code or if the table is PC-relative, the
9685 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9686 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9687 emit_barrier ();
9690 int
9691 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9692 rtx table_label, rtx default_label)
9694 rtx index;
9696 if (! HAVE_tablejump)
9697 return 0;
9699 index_expr = fold (build (MINUS_EXPR, index_type,
9700 convert (index_type, index_expr),
9701 convert (index_type, minval)));
9702 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9704 index = protect_from_queue (index, 0);
9705 do_pending_stack_adjust ();
9707 do_tablejump (index, TYPE_MODE (index_type),
9708 convert_modes (TYPE_MODE (index_type),
9709 TYPE_MODE (TREE_TYPE (range)),
9710 expand_expr (range, NULL_RTX,
9711 VOIDmode, 0),
9712 TYPE_UNSIGNED (TREE_TYPE (range))),
9713 table_label, default_label);
9714 return 1;
9717 /* Nonzero if the mode is a valid vector mode for this architecture.
9718 This returns nonzero even if there is no hardware support for the
9719 vector mode, but we can emulate with narrower modes. */
9721 int
9722 vector_mode_valid_p (enum machine_mode mode)
9724 enum mode_class class = GET_MODE_CLASS (mode);
9725 enum machine_mode innermode;
9727 /* Doh! What's going on? */
9728 if (class != MODE_VECTOR_INT
9729 && class != MODE_VECTOR_FLOAT)
9730 return 0;
9732 /* Hardware support.  Woo hoo!  */
9733 if (VECTOR_MODE_SUPPORTED_P (mode))
9734 return 1;
9736 innermode = GET_MODE_INNER (mode);
9738 /* We should probably return 1 if requesting V4DI and we have no DI,
9739 but we have V2DI, but this is probably very unlikely. */
9741 /* If we have support for the inner mode, we can safely emulate it.
9742 We may not have V2DI, but we can emulate with a pair of DIs.  */
9743 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9746 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9747 static rtx
9748 const_vector_from_tree (tree exp)
9750 rtvec v;
9751 int units, i;
9752 tree link, elt;
9753 enum machine_mode inner, mode;
9755 mode = TYPE_MODE (TREE_TYPE (exp));
9757 if (initializer_zerop (exp))
9758 return CONST0_RTX (mode);
9760 units = GET_MODE_NUNITS (mode);
9761 inner = GET_MODE_INNER (mode);
9763 v = rtvec_alloc (units);
9765 link = TREE_VECTOR_CST_ELTS (exp);
9766 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9768 elt = TREE_VALUE (link);
9770 if (TREE_CODE (elt) == REAL_CST)
9771 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9772 inner);
9773 else
9774 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9775 TREE_INT_CST_HIGH (elt),
9776 inner);
9779 /* Initialize remaining elements to 0. */
9780 for (; i < units; ++i)
9781 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9783 return gen_rtx_raw_CONST_VECTOR (mode, v);
9785 #include "gt-expr.h"