/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
#include "tree-iterator.h"
#include "tree-pass.h"
#include "tree-flow.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
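
/* Illustrative sketch (not from this file): with STACK_PUSH_CODE == PRE_DEC,
   a single push of a register on a downward-growing stack is a store
   through a pre-decremented stack pointer, roughly

     (set (mem:SI (pre_dec:SI (reg/f:SI sp)))
          (reg:SI 0))

   whereas an upward-growing stack would use PRE_INC instead.  */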
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movmem (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrmem (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_target (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx reduce_to_bit_field_precision (rtx, rtx, tree);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
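
/* A minimal sketch (hypothetical, not part of this file) of the CONSTFUN
   callback that store_by_pieces expects: it receives the CONSTFUNDATA
   pointer, a byte offset, and the mode of the piece being stored, and must
   return an rtx for that piece.  The callback below replicates one byte
   across the requested mode, as a memset-style filler would.  */
#if 0
static rtx
repeat_byte_constfun (void *data, HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		      enum machine_mode mode)
{
  unsigned char byte = *(unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Replicate BYTE into every byte position of MODE.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val |= (unsigned HOST_WIDE_INT) byte << (i * BITS_PER_UNIT);
  return gen_int_mode (val, mode);
}
#endif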
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movmem_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrmem_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat, mem, mem1, reg;
  enum machine_mode mode;
  int num_clobbers;
  int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
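
/* Illustrative sketch (hypothetical): the tables computed above are simple
   per-mode caches consulted later, e.g.

     if (direct_load[(int) DFmode])
       ... some hard register can be loaded directly from DFmode memory ...

   and float_extend_from_mem[DFmode][SFmode] records whether an SFmode
   memory operand can feed a DFmode float-extend insn directly.  */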
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */
rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
     use of autoincrement.  Make a copy of the contents of the memory
     location rather than a copy of the address, but not if the value is
     of mode BLKmode.  Don't modify X in place since it might be
     shared.  */
  if (code == MEM && GET_MODE (x) != BLKmode
      && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
    {
      rtx y = XEXP (x, 0);
      rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

      if (QUEUED_INSN (y))
	{
	  rtx temp = gen_reg_rtx (GET_MODE (x));

	  emit_insn_before (gen_move_insn (temp, new),
			    QUEUED_INSN (y));
	  return temp;
	}

      /* Copy the address into a pseudo, so that the returned value
	 remains correct across calls to emit_queue.  */
      return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
    }

  /* Otherwise, recursively protect the subexpressions of all
     the kinds of rtx's that can contain a QUEUED.  */
  if (code == MEM)
    {
      rtx tem = protect_from_queue (XEXP (x, 0), 0);
      if (tem != XEXP (x, 0))
	{
	  x = copy_rtx (x);
	  XEXP (x, 0) = tem;
	}
    }
  else if (code == PLUS || code == MULT)
    {
      rtx new0 = protect_from_queue (XEXP (x, 0), 0);
      rtx new1 = protect_from_queue (XEXP (x, 1), 0);
      if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	{
	  x = copy_rtx (x);
	  XEXP (x, 0) = new0;
	  XEXP (x, 1) = new1;
	}
    }
  if (code != QUEUED)
    return x;

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
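
/* A minimal usage sketch (hypothetical, not part of this file): any rtx
   that might be a QUEUED must be laundered before it goes into an insn:

     op0 = protect_from_queue (op0, 0);    (0: op0 is only read)
     dst = protect_from_queue (dst, 1);    (1: dst will be modified)
     emit_move_insn (dst, op0);

   The returned values are only safe until the queue is next flushed;
   see the warning in the comment above.  */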
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Retrieve a mark on the queue.  */

rtx
mark_queue (void)
{
  return pending_chain;
}
/* Perform all the pending incrementations that have been enqueued
   after MARK was retrieved.  If MARK is null, perform all the
   pending incrementations.  */

static void
emit_insns_enqueued_after_mark (rtx mark)
{
  rtx p;

  /* The marked incrementation may have been emitted in the meantime
     through a call to emit_queue.  In this case, the mark is not valid
     anymore so do nothing.  */
  if (mark && ! QUEUED_BODY (mark))
    return;

  while ((p = pending_chain) != mark)
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  QUEUED_INSN (p) = body;
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      QUEUED_BODY (p) = 0;
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  emit_insns_enqueued_after_mark (NULL_RTX);
}
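
/* Sketch of the mark/replay protocol (hypothetical, not part of this
   file): a caller can snapshot the queue, expand something that may
   enqueue increments, and then flush only what was added since:

     rtx mark = mark_queue ();
     ... expansion that may call enqueue_insn ...
     emit_insns_enqueued_after_mark (mark);

   Passing NULL_RTX, as emit_queue does, flushes the entire queue.  */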
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If the source and destination are already the same, then there's
     nothing to do.  */

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (REG_P (to))
	    {
	      if (reg_overlap_mentioned_p (to, from))
		from = force_reg (from_mode, from);
	      emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	    }
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((MEM_P (from)
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || REG_P (from)
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (REG_P (from) && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
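
/* Usage sketch (hypothetical, not from this file): convert_move is the
   workhorse for mode conversions, e.g. widening a SImode value into a
   DImode register:

     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 1);	(UNSIGNEDP == 1 => zero-extend)

   Passing 0 for UNSIGNEDP requests sign-extension instead.  */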
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((MEM_P (x) && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (REG_P (x)
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
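
/* Worked example (not from this file): converting the QImode constant -1
   to SImode with UNSIGNEDP set takes the CONST_INT path above: WIDTH is 8,
   so VAL is masked down to 255 before gen_int_mode rebuilds the constant;
   without that mask, gen_lowpart would hand back a sign-extended -1.  */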
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
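
/* Usage sketch (hypothetical, not from this file): copying 16 bytes and
   obtaining a mempcpy-style pointer past the end of the destination:

     rtx end = move_by_pieces (dst, src, 16, align, 1);

   ENDP == 0 would return the destination block itself, and ENDP == 2
   returns memory one byte before the end, matching stpcpy.  */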
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
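
/* Worked example (assuming a target with MOVE_MAX == 4 and full-width
   moves): for L == 11 sufficiently aligned bytes the loop counts
   11/4 = 2 SImode moves (3 bytes left), 3/2 = 1 HImode move (1 left),
   and 1 QImode move, so move_by_pieces_ninsns returns 4; that count is
   what MOVE_BY_PIECES_P compares against MOVE_RATIO.  */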
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  /* Make sure we've got BLKmode addresses; store_one_arg can decide that
     block copy is more efficient for other large modes, e.g. DCmode.  */
  x = adjust_address (x, BLKmode, 0);
  y = adjust_address (y, BLKmode, 0);

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movmem (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movmem pattern;
   return true if successful.  */

static bool
emit_block_move_via_movmem (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  int save_volatile_ok = volatile_ok;
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movmem_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = save_volatile_ok;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = save_volatile_ok;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  size_mode = TYPE_MODE (sizetype);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.  */

  size_tree = make_tree (sizetype, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
  arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return retval;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      fn = get_identifier ("memcpy");
      args = build_function_type_list (ptr_type_node, ptr_type_node,
				       const_ptr_type_node, sizetype,
				       NULL_TREE);

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);
}
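
/* The RTL emitted above is simply the byte-copy loop below, spelled out
   with labels, compares, and QImode moves (sketch, not part of this file):

     for (iter = 0; iter < size; iter++)
       x[iter] = y[iter];

   hence the ??? note: each iteration moves a single byte.  */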
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat, last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (!MEM_P (orig_src)
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (src)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (!REG_P (tmps[i]) || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && REG_P (src))
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (REG_P (src) && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode);

      if (shift)
	tmps[i] = expand_shift (LSHIFT_EXPR, mode, tmps[i],
				build_int_2 (shift, 0), tmps[i], 0);
    }

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
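
/* Shape sketch (hypothetical, not from this file): a PARALLEL destination
   describing a value split across two registers, the first carrying bytes
   0-7 and the second bytes 8-11, looks like

     (parallel [(expr_list (reg:DI 100) (const_int 0))
		(expr_list (reg:SI 101) (const_int 8))])

   and emit_group_load extracts each piece from ORIG_SRC at its offset.  */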
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}

/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */
      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (!MEM_P (dst) && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      tmps[i] = expand_shift (RSHIFT_EXPR, mode, tmps[i],
				      build_int_2 (shift, 0), tmps[i], 0);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	    }
	}

      /* Optimize the access just a bit.  */
      if (MEM_P (dest)
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i]);
    }

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2086 /* Generate code to copy a BLKmode object of TYPE out of a
2087 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2088 is null, a stack temporary is created. TGTBLK is returned.
2090 The purpose of this routine is to handle functions that return
2091 BLKmode structures in registers. Some machines (the PA for example)
2092 want to return all small structures in registers regardless of the
2093 structure's alignment. */
2096 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2098 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2099 rtx src = NULL, dst = NULL;
2100 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2101 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2105 tgtblk = assign_temp (build_qualified_type (type,
2107 | TYPE_QUAL_CONST)),
2109 preserve_temp_slots (tgtblk);
2112 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2113 into a new pseudo which is a full word. */
2115 if (GET_MODE (srcreg) != BLKmode
2116 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2117 srcreg = convert_to_mode (word_mode, srcreg, TYPE_UNSIGNED (type));
2119 /* If the structure doesn't take up a whole number of words, see whether
2120 SRCREG is padded on the left or on the right. If it's on the left,
2121 set PADDING_CORRECTION to the number of bits to skip.
2123 In most ABIs, the structure will be returned at the least significant end of
2124 the register, which translates to right padding on little-endian
2125 targets and left padding on big-endian targets. The opposite
2126 holds if the structure is returned at the most significant
2127 end of the register. */
2128 if (bytes % UNITS_PER_WORD != 0
2129 && (targetm.calls.return_in_msb (type)
2131 : BYTES_BIG_ENDIAN))
2133 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2135 /* Copy the structure BITSIZE bits at a time.
2137 We could probably emit more efficient code for machines which do not use
2138 strict alignment, but it doesn't seem worth the effort at the current time. */
2140 for (bitpos = 0, xbitpos = padding_correction;
2141 bitpos < bytes * BITS_PER_UNIT;
2142 bitpos += bitsize, xbitpos += bitsize)
2144 /* We need a new source operand each time xbitpos is on a
2145 word boundary and when xbitpos == padding_correction
2146 (the first time through). */
2147 if (xbitpos % BITS_PER_WORD == 0
2148 || xbitpos == padding_correction)
2149 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2152 /* We need a new destination operand each time bitpos is on a word boundary. */
2154 if (bitpos % BITS_PER_WORD == 0)
2155 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2157 /* Use xbitpos for the source extraction (right justified) and
2158 bitpos for the destination store (left justified). */
2159 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2160 extract_bit_field (src, bitsize,
2161 xbitpos % BITS_PER_WORD, 1,
2162 NULL_RTX, word_mode, word_mode));
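/* Illustrative worked example (not part of the original source): for a
   6-byte structure on a 32-bit target (UNITS_PER_WORD == 4,
   BITS_PER_WORD == 32) that is padded on the left, the code above
   computes

     padding_correction = 32 - (6 % 4) * 8 = 16

   so XBITPOS starts 16 bits into the first word of SRCREG while BITPOS
   still starts at 0 in TGTBLK.  */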
2168 /* Add a USE expression for REG to the (possibly empty) list pointed
2169 to by CALL_FUSAGE. REG must denote a hard register. */
2172 use_reg (rtx *call_fusage, rtx reg)
2175 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2179 = gen_rtx_EXPR_LIST (VOIDmode,
2180 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2183 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2184 starting at REGNO. All of these registers must be hard registers. */
2187 use_regs (rtx *call_fusage, int regno, int nregs)
2191 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2194 for (i = 0; i < nregs; i++)
2195 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2198 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2199 PARALLEL REGS. This is for calls that pass values in multiple
2200 non-contiguous locations. The Irix 6 ABI has examples of this. */
2203 use_group_regs (rtx *call_fusage, rtx regs)
2207 for (i = 0; i < XVECLEN (regs, 0); i++)
2209 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2211 /* A NULL entry means the parameter goes both on the stack and in
2212 registers. This can also be a MEM for targets that pass values
2213 partially on the stack and partially in registers. */
2214 if (reg != 0 && REG_P (reg))
2215 use_reg (call_fusage, reg);
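/* Illustrative sketch (not part of the original source): how a caller
   accumulates the fusage list before emitting a call; the hard
   register numbers are hypothetical.  */
#if 0
static rtx
example_build_fusage (void)
{
  rtx call_fusage = NULL_RTX;

  use_reg (&call_fusage, gen_rtx_REG (SImode, 4));	/* one hard reg */
  use_regs (&call_fusage, 5, 2);			/* hard regs 5 and 6 */
  return call_fusage;
}
#endif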
2220 /* Determine whether the LEN bytes generated by CONSTFUN can be
2221 stored to memory using several move instructions. CONSTFUNDATA is
2222 a pointer which will be passed as argument in every CONSTFUN call.
2223 ALIGN is maximum alignment we can assume. Return nonzero if a
2224 call to store_by_pieces should succeed. */
2227 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2228 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2229 void *constfundata, unsigned int align)
2231 unsigned HOST_WIDE_INT max_size, l;
2232 HOST_WIDE_INT offset = 0;
2233 enum machine_mode mode, tmode;
2234 enum insn_code icode;
2241 if (! STORE_BY_PIECES_P (len, align))
2244 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2245 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2246 align = MOVE_MAX * BITS_PER_UNIT;
2248 /* We would first store what we can in the largest integer mode, then go to
2249 successively smaller modes. */
2252 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2257 max_size = STORE_MAX_PIECES + 1;
2258 while (max_size > 1)
2260 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2261 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2262 if (GET_MODE_SIZE (tmode) < max_size)
2265 if (mode == VOIDmode)
2268 icode = mov_optab->handlers[(int) mode].insn_code;
2269 if (icode != CODE_FOR_nothing
2270 && align >= GET_MODE_ALIGNMENT (mode))
2272 unsigned int size = GET_MODE_SIZE (mode);
2279 cst = (*constfun) (constfundata, offset, mode);
2280 if (!LEGITIMATE_CONSTANT_P (cst))
2290 max_size = GET_MODE_SIZE (mode);
2293 /* The code above should have handled everything. */
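/* Illustrative sketch (not part of the original source): a CONSTFUN of
   the shape can_store_by_pieces expects, mirroring the callback the
   string builtins use.  It returns the MODE-sized piece of a constant
   C string at OFFSET; c_readstr is the existing helper from builtins.c.
   A caller would first ask can_store_by_pieces and, on success, pass
   the same callback to store_by_pieces.  */
#if 0
static rtx
example_read_str (void *data, HOST_WIDE_INT offset, enum machine_mode mode)
{
  const char *str = (const char *) data;

  return c_readstr (str + offset, mode);
}
#endif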
2301 /* Generate several move instructions to store LEN bytes generated by
2302 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2303 pointer which will be passed as argument in every CONSTFUN call.
2304 ALIGN is maximum alignment we can assume.
2305 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2306 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy. */
2310 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2311 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2312 void *constfundata, unsigned int align, int endp)
2314 struct store_by_pieces data;
2323 if (! STORE_BY_PIECES_P (len, align))
2325 to = protect_from_queue (to, 1);
2326 data.constfun = constfun;
2327 data.constfundata = constfundata;
2330 store_by_pieces_1 (&data, align);
2341 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2342 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2344 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2347 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2354 to1 = adjust_address (data.to, QImode, data.offset);
2362 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2363 rtx with BLKmode). The caller must pass TO through protect_from_queue
2364 before calling. ALIGN is maximum alignment we can assume. */
2367 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2369 struct store_by_pieces data;
2374 data.constfun = clear_by_pieces_1;
2375 data.constfundata = NULL;
2378 store_by_pieces_1 (&data, align);
2381 /* Callback routine for clear_by_pieces.
2382 Return const0_rtx unconditionally. */
2385 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2386 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2387 enum machine_mode mode ATTRIBUTE_UNUSED)
2392 /* Subroutine of clear_by_pieces and store_by_pieces.
2393 Generate several move instructions to store LEN bytes of block TO. (A MEM
2394 rtx with BLKmode). The caller must pass TO through protect_from_queue
2395 before calling. ALIGN is maximum alignment we can assume. */
2398 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2399 unsigned int align ATTRIBUTE_UNUSED)
2401 rtx to_addr = XEXP (data->to, 0);
2402 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2403 enum machine_mode mode = VOIDmode, tmode;
2404 enum insn_code icode;
2407 data->to_addr = to_addr;
2409 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2410 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2412 data->explicit_inc_to = 0;
2414 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2416 data->offset = data->len;
2418 /* If storing requires more than two move insns,
2419 copy addresses to registers (to make displacements shorter)
2420 and use post-increment if available. */
2421 if (!data->autinc_to
2422 && move_by_pieces_ninsns (data->len, align) > 2)
2424 /* Determine the main mode we'll be using. */
2425 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2426 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2427 if (GET_MODE_SIZE (tmode) < max_size)
2430 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2432 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2433 data->autinc_to = 1;
2434 data->explicit_inc_to = -1;
2437 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2438 && ! data->autinc_to)
2440 data->to_addr = copy_addr_to_reg (to_addr);
2441 data->autinc_to = 1;
2442 data->explicit_inc_to = 1;
2445 if (!data->autinc_to && CONSTANT_P (to_addr))
2446 data->to_addr = copy_addr_to_reg (to_addr);
2449 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2450 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2451 align = MOVE_MAX * BITS_PER_UNIT;
2453 /* First store what we can in the largest integer mode, then go to
2454 successively smaller modes. */
2456 while (max_size > 1)
2458 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2459 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2460 if (GET_MODE_SIZE (tmode) < max_size)
2463 if (mode == VOIDmode)
2466 icode = mov_optab->handlers[(int) mode].insn_code;
2467 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2468 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2470 max_size = GET_MODE_SIZE (mode);
2473 /* The code above should have handled everything. */
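/* Illustrative sketch (not part of the original source): the
   widest-integer-mode selection used by the loops above, pulled out in
   isolation.  It yields the widest MODE_INT mode strictly narrower
   than MAX_SIZE bytes, or VOIDmode if there is none.  */
#if 0
static enum machine_mode
example_widest_int_mode_below (unsigned int max_size)
{
  enum machine_mode tmode, mode = VOIDmode;

  for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
    if (GET_MODE_SIZE (tmode) < max_size)
      mode = tmode;
  return mode;
}
#endif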
2478 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2479 with move instructions for mode MODE. GENFUN is the gen_... function
2480 to make a move insn for that mode. DATA has all the other info. */
2483 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2484 struct store_by_pieces *data)
2486 unsigned int size = GET_MODE_SIZE (mode);
2489 while (data->len >= size)
2492 data->offset -= size;
2494 if (data->autinc_to)
2495 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2498 to1 = adjust_address (data->to, mode, data->offset);
2500 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2501 emit_insn (gen_add2_insn (data->to_addr,
2502 GEN_INT (-(HOST_WIDE_INT) size)));
2504 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2505 emit_insn ((*genfun) (to1, cst));
2507 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2508 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2510 if (! data->reverse)
2511 data->offset += size;
2517 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2518 its length in bytes. */
2521 clear_storage (rtx object, rtx size)
2524 unsigned int align = (MEM_P (object) ? MEM_ALIGN (object)
2525 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2527 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2528 just move a zero. Otherwise, do this a piece at a time. */
2529 if (GET_MODE (object) != BLKmode
2530 && GET_CODE (size) == CONST_INT
2531 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2532 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2535 object = protect_from_queue (object, 1);
2536 size = protect_from_queue (size, 0);
2538 if (size == const0_rtx)
2540 else if (GET_CODE (size) == CONST_INT
2541 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2542 clear_by_pieces (object, INTVAL (size), align);
2543 else if (clear_storage_via_clrmem (object, size, align))
2546 retval = clear_storage_via_libcall (object, size);
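/* Illustrative sketch (not part of the original source): the typical
   BLKmode use of clear_storage, zeroing a stack temporary; the 64-byte
   size is hypothetical.  */
#if 0
static void
example_clear_temp (void)
{
  rtx mem = assign_stack_temp (BLKmode, 64, 0);

  clear_storage (mem, GEN_INT (64));
}
#endif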
2552 /* A subroutine of clear_storage. Expand a clrmem pattern;
2553 return true if successful. */
2556 clear_storage_via_clrmem (rtx object, rtx size, unsigned int align)
2558 /* Try the most limited insn first, because there's no point
2559 including more than one in the machine description unless
2560 the more limited one has some advantage. */
2562 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2563 enum machine_mode mode;
2565 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2566 mode = GET_MODE_WIDER_MODE (mode))
2568 enum insn_code code = clrmem_optab[(int) mode];
2569 insn_operand_predicate_fn pred;
2571 if (code != CODE_FOR_nothing
2572 /* We don't need MODE to be narrower than
2573 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2574 the mode mask, as it is returned by the macro, it will
2575 definitely be less than the actual mode mask. */
2576 && ((GET_CODE (size) == CONST_INT
2577 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2578 <= (GET_MODE_MASK (mode) >> 1)))
2579 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2580 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2581 || (*pred) (object, BLKmode))
2582 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2583 || (*pred) (opalign, VOIDmode)))
2586 rtx last = get_last_insn ();
2589 op1 = convert_to_mode (mode, size, 1);
2590 pred = insn_data[(int) code].operand[1].predicate;
2591 if (pred != 0 && ! (*pred) (op1, mode))
2592 op1 = copy_to_mode_reg (mode, op1);
2594 pat = GEN_FCN ((int) code) (object, op1, opalign);
2601 delete_insns_since (last);
2608 /* A subroutine of clear_storage. Expand a call to memset.
2609 Return the return value of memset, 0 otherwise. */
2612 clear_storage_via_libcall (rtx object, rtx size)
2614 tree call_expr, arg_list, fn, object_tree, size_tree;
2615 enum machine_mode size_mode;
2618 /* OBJECT or SIZE may have been passed through protect_from_queue.
2620 It is unsafe to save the value generated by protect_from_queue
2621 and reuse it later. Consider what happens if emit_queue is
2622 called before the return value from protect_from_queue is used.
2624 Expansion of the CALL_EXPR below will call emit_queue before
2625 we are finished emitting RTL for argument setup. So if we are
2626 not careful we could get the wrong value for an argument.
2628 To avoid this problem we go ahead and emit code to copy OBJECT
2629 and SIZE into new pseudos.
2631 Note this is not strictly needed for library calls since they
2632 do not call emit_queue before loading their arguments. However,
2633 we may need to have library calls call emit_queue in the future
2634 since failing to do so could cause problems for targets which
2635 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2637 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2639 size_mode = TYPE_MODE (sizetype);
2640 size = convert_to_mode (size_mode, size, 1);
2641 size = copy_to_mode_reg (size_mode, size);
2643 /* It is incorrect to use the libcall calling conventions to call
2644 memset in this context. This could be a user call to memset and
2645 the user may wish to examine the return value from memset. For
2646 targets where libcalls and normal calls have different conventions
2647 for returning pointers, we could end up generating incorrect code. */
2649 object_tree = make_tree (ptr_type_node, object);
2650 size_tree = make_tree (sizetype, size);
2652 fn = clear_storage_libcall_fn (true);
2653 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2654 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2655 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2657 /* Now we have to build up the CALL_EXPR itself. */
2658 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2659 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2660 call_expr, arg_list, NULL_TREE);
2662 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2664 /* If we are initializing a readonly value, show the above call
2665 clobbered it. Otherwise, a load from it may erroneously be
2666 hoisted from a loop. */
2667 if (RTX_UNCHANGING_P (object))
2668 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2673 /* A subroutine of clear_storage_via_libcall. Create the tree node
2674 for the function we use for block clears. The first time FOR_CALL
2675 is true, we call assemble_external. */
2677 static GTY(()) tree block_clear_fn;
2680 init_block_clear_fn (const char *asmspec)
2682 if (!block_clear_fn)
2686 fn = get_identifier ("memset");
2687 args = build_function_type_list (ptr_type_node, ptr_type_node,
2688 integer_type_node, sizetype,
2691 fn = build_decl (FUNCTION_DECL, fn, args);
2692 DECL_EXTERNAL (fn) = 1;
2693 TREE_PUBLIC (fn) = 1;
2694 DECL_ARTIFICIAL (fn) = 1;
2695 TREE_NOTHROW (fn) = 1;
2697 block_clear_fn = fn;
2702 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2703 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2708 clear_storage_libcall_fn (int for_call)
2710 static bool emitted_extern;
2712 if (!block_clear_fn)
2713 init_block_clear_fn (NULL);
2715 if (for_call && !emitted_extern)
2717 emitted_extern = true;
2718 make_decl_rtl (block_clear_fn, NULL);
2719 assemble_external (block_clear_fn);
2722 return block_clear_fn;
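/* Illustrative sketch (not part of the original source): a port whose
   runtime provides the clearing routine under a different assembler
   name (the name below is hypothetical) would redirect the block-clear
   function by passing an asmspec.  */
#if 0
static void
example_rename_block_clear (void)
{
  init_block_clear_fn ("__xyz_memset");
}
#endif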
2725 /* Generate code to copy Y into X.
2726 Both Y and X must have the same mode, except that
2727 Y can be a constant with VOIDmode.
2728 This mode cannot be BLKmode; use emit_block_move for that.
2730 Return the last instruction emitted. */
2733 emit_move_insn (rtx x, rtx y)
2735 enum machine_mode mode = GET_MODE (x);
2736 rtx y_cst = NULL_RTX;
2739 x = protect_from_queue (x, 1);
2740 y = protect_from_queue (y, 0);
2742 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2748 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2749 && (last_insn = compress_float_constant (x, y)))
2754 if (!LEGITIMATE_CONSTANT_P (y))
2756 y = force_const_mem (mode, y);
2758 /* If the target's cannot_force_const_mem prevented the spill,
2759 assume that the target's move expanders will also take care
2760 of the non-legitimate constant. */
2766 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
2769 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2770 && ! push_operand (x, GET_MODE (x)))
2772 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2773 x = validize_mem (x);
2776 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2778 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2779 y = validize_mem (y);
2781 if (mode == BLKmode)
2784 last_insn = emit_move_insn_1 (x, y);
2786 if (y_cst && REG_P (x)
2787 && (set = single_set (last_insn)) != NULL_RTX
2788 && SET_DEST (set) == x
2789 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2790 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
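/* Illustrative sketch (not part of the original source): the effect of
   the REG_EQUAL note attached above.  If Y was a constant that had to
   be spilled to the constant pool, the emitted move looks roughly like

     (insn (set (reg:DF 100) (mem:DF (symbol_ref "*.LC0")))
	   (expr_list:REG_EQUAL (const_double ...) (nil)))

   so later passes can still treat the register as that constant.  */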
2795 /* Low level part of emit_move_insn.
2796 Called just like emit_move_insn, but assumes X and Y
2797 are basically valid. */
2800 emit_move_insn_1 (rtx x, rtx y)
2802 enum machine_mode mode = GET_MODE (x);
2803 enum machine_mode submode;
2804 enum mode_class class = GET_MODE_CLASS (mode);
2806 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2809 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2811 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2813 /* Expand complex moves by moving real part and imag part, if possible. */
2814 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2815 && BLKmode != (submode = GET_MODE_INNER (mode))
2816 && (mov_optab->handlers[(int) submode].insn_code
2817 != CODE_FOR_nothing))
2819 /* Don't split destination if it is a stack push. */
2820 int stack = push_operand (x, GET_MODE (x));
2822 #ifdef PUSH_ROUNDING
2823 /* In case we output to the stack, but the size is smaller than the
2824 machine can push exactly, we need to use move instructions. */
2826 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2827 != GET_MODE_SIZE (submode)))
2830 HOST_WIDE_INT offset1, offset2;
2832 /* Do not use anti_adjust_stack, since we don't want to update
2833 stack_pointer_delta. */
2834 temp = expand_binop (Pmode,
2835 #ifdef STACK_GROWS_DOWNWARD
2843 (GET_MODE_SIZE (GET_MODE (x)))),
2844 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2846 if (temp != stack_pointer_rtx)
2847 emit_move_insn (stack_pointer_rtx, temp);
2849 #ifdef STACK_GROWS_DOWNWARD
2851 offset2 = GET_MODE_SIZE (submode);
2853 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2854 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2855 + GET_MODE_SIZE (submode));
2858 emit_move_insn (change_address (x, submode,
2859 gen_rtx_PLUS (Pmode,
2861 GEN_INT (offset1))),
2862 gen_realpart (submode, y));
2863 emit_move_insn (change_address (x, submode,
2864 gen_rtx_PLUS (Pmode,
2866 GEN_INT (offset2))),
2867 gen_imagpart (submode, y));
2871 /* If this is a stack, push the highpart first, so it
2872 will be in the argument order.
2874 In that case, change_address is used only to convert
2875 the mode, not to change the address. */
2878 /* Note that the real part always precedes the imag part in memory
2879 regardless of machine's endianness. */
2880 #ifdef STACK_GROWS_DOWNWARD
2881 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2882 gen_imagpart (submode, y));
2883 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2884 gen_realpart (submode, y));
2886 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2887 gen_realpart (submode, y));
2888 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2889 gen_imagpart (submode, y));
2894 rtx realpart_x, realpart_y;
2895 rtx imagpart_x, imagpart_y;
2897 /* If this is a complex value with each part being smaller than a
2898 word, the usual calling sequence will likely pack the pieces into
2899 a single register. Unfortunately, SUBREG of hard registers only
2900 deals in terms of words, so we have a problem converting input
2901 arguments to the CONCAT of two registers that is used elsewhere
2902 for complex values. If this is before reload, we can copy it into
2903 memory and reload. FIXME, we should see about using extract and
2904 insert on integer registers, but complex short and complex char
2905 variables should be rarely used. */
2906 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2907 && (reload_in_progress | reload_completed) == 0)
2910 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2912 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2914 if (packed_dest_p || packed_src_p)
2916 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2917 ? MODE_FLOAT : MODE_INT);
2919 enum machine_mode reg_mode
2920 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2922 if (reg_mode != BLKmode)
2924 rtx mem = assign_stack_temp (reg_mode,
2925 GET_MODE_SIZE (mode), 0);
2926 rtx cmem = adjust_address (mem, mode, 0);
2930 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2932 emit_move_insn_1 (cmem, y);
2933 return emit_move_insn_1 (sreg, mem);
2937 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2939 emit_move_insn_1 (mem, sreg);
2940 return emit_move_insn_1 (x, cmem);
2946 realpart_x = gen_realpart (submode, x);
2947 realpart_y = gen_realpart (submode, y);
2948 imagpart_x = gen_imagpart (submode, x);
2949 imagpart_y = gen_imagpart (submode, y);
2951 /* Show the output dies here. This is necessary for SUBREGs
2952 of pseudos since we cannot track their lifetimes correctly;
2953 hard regs shouldn't appear here except as return values.
2954 We never want to emit such a clobber after reload. */
2956 && ! (reload_in_progress || reload_completed)
2957 && (GET_CODE (realpart_x) == SUBREG
2958 || GET_CODE (imagpart_x) == SUBREG))
2959 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2961 emit_move_insn (realpart_x, realpart_y);
2962 emit_move_insn (imagpart_x, imagpart_y);
2965 return get_last_insn ();
2968 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
2969 find a mode to do it in. If we have a movcc, use it. Otherwise,
2970 find the MODE_INT mode of the same width. */
2971 else if (GET_MODE_CLASS (mode) == MODE_CC
2972 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
2974 enum insn_code insn_code;
2975 enum machine_mode tmode = VOIDmode;
2979 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
2982 for (tmode = QImode; tmode != VOIDmode;
2983 tmode = GET_MODE_WIDER_MODE (tmode))
2984 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
2987 if (tmode == VOIDmode)
2990 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
2991 may call change_address which is not appropriate if we were
2992 called when a reload was in progress. We don't have to worry
2993 about changing the address since the size in bytes is supposed to
2994 be the same. Copy the MEM to change the mode and move any
2995 substitutions from the old MEM to the new one. */
2997 if (reload_in_progress)
2999 x = gen_lowpart_common (tmode, x1);
3000 if (x == 0 && MEM_P (x1))
3002 x = adjust_address_nv (x1, tmode, 0);
3003 copy_replacements (x1, x);
3006 y = gen_lowpart_common (tmode, y1);
3007 if (y == 0 && MEM_P (y1))
3009 y = adjust_address_nv (y1, tmode, 0);
3010 copy_replacements (y1, y);
3015 x = gen_lowpart (tmode, x);
3016 y = gen_lowpart (tmode, y);
3019 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3020 return emit_insn (GEN_FCN (insn_code) (x, y));
3023 /* Try using a move pattern for the corresponding integer mode. This is
3024 only safe when simplify_subreg can convert MODE constants into integer
3025 constants. At present, it can only do this reliably if the value
3026 fits within a HOST_WIDE_INT. */
3027 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3028 && (submode = int_mode_for_mode (mode)) != BLKmode
3029 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3030 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3031 (simplify_gen_subreg (submode, x, mode, 0),
3032 simplify_gen_subreg (submode, y, mode, 0)));
3034 /* This will handle any multi-word or full-word mode that lacks a move_insn
3035 pattern. However, you will get better code if you define such patterns,
3036 even if they must turn into multiple assembler instructions. */
3037 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3044 #ifdef PUSH_ROUNDING
3046 /* If X is a push on the stack, do the push now and replace
3047 X with a reference to the stack pointer. */
3048 if (push_operand (x, GET_MODE (x)))
3053 /* Do not use anti_adjust_stack, since we don't want to update
3054 stack_pointer_delta. */
3055 temp = expand_binop (Pmode,
3056 #ifdef STACK_GROWS_DOWNWARD
3064 (GET_MODE_SIZE (GET_MODE (x)))),
3065 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3067 if (temp != stack_pointer_rtx)
3068 emit_move_insn (stack_pointer_rtx, temp);
3070 code = GET_CODE (XEXP (x, 0));
3072 /* Just hope that small offsets off SP are OK. */
3073 if (code == POST_INC)
3074 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3075 GEN_INT (-((HOST_WIDE_INT)
3076 GET_MODE_SIZE (GET_MODE (x)))));
3077 else if (code == POST_DEC)
3078 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3079 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3081 temp = stack_pointer_rtx;
3083 x = change_address (x, VOIDmode, temp);
3087 /* If we are in reload, see if either operand is a MEM whose address
3088 is scheduled for replacement. */
3089 if (reload_in_progress && MEM_P (x)
3090 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3091 x = replace_equiv_address_nv (x, inner);
3092 if (reload_in_progress && MEM_P (y)
3093 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3094 y = replace_equiv_address_nv (y, inner);
3100 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3103 rtx xpart = operand_subword (x, i, 1, mode);
3104 rtx ypart = operand_subword (y, i, 1, mode);
3106 /* If we can't get a part of Y, put Y into memory if it is a
3107 constant. Otherwise, force it into a register. If we still
3108 can't get a part of Y, abort. */
3109 if (ypart == 0 && CONSTANT_P (y))
3111 y = force_const_mem (mode, y);
3112 ypart = operand_subword (y, i, 1, mode);
3114 else if (ypart == 0)
3115 ypart = operand_subword_force (y, i, mode);
3117 if (xpart == 0 || ypart == 0)
3120 need_clobber |= (GET_CODE (xpart) == SUBREG);
3122 last_insn = emit_move_insn (xpart, ypart);
3128 /* Show the output dies here. This is necessary for SUBREGs
3129 of pseudos since we cannot track their lifetimes correctly;
3130 hard regs shouldn't appear here except as return values.
3131 We never want to emit such a clobber after reload. */
3133 && ! (reload_in_progress || reload_completed)
3134 && need_clobber != 0)
3135 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3145 /* If Y is representable exactly in a narrower mode, and the target can
3146 perform the extension directly from constant or memory, then emit the
3147 move as an extension. */
3150 compress_float_constant (rtx x, rtx y)
3152 enum machine_mode dstmode = GET_MODE (x);
3153 enum machine_mode orig_srcmode = GET_MODE (y);
3154 enum machine_mode srcmode;
3157 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3159 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3160 srcmode != orig_srcmode;
3161 srcmode = GET_MODE_WIDER_MODE (srcmode))
3164 rtx trunc_y, last_insn;
3166 /* Skip if the target can't extend this way. */
3167 ic = can_extend_p (dstmode, srcmode, 0);
3168 if (ic == CODE_FOR_nothing)
3171 /* Skip if the narrowed value isn't exact. */
3172 if (! exact_real_truncate (srcmode, &r))
3175 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3177 if (LEGITIMATE_CONSTANT_P (trunc_y))
3179 /* Skip if the target needs extra instructions to perform the extension. */
3181 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3184 else if (float_extend_from_mem[dstmode][srcmode])
3185 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3189 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3190 last_insn = get_last_insn ();
3193 set_unique_reg_note (last_insn, REG_EQUAL, y);
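/* Illustrative worked example (not part of the original source):
   moving the DFmode constant 0.5 into a register.  Since 0.5 truncates
   to SFmode exactly, and most FP targets can extend from memory, the
   code above emits roughly

     (set (reg:DF 100) (float_extend:DF (mem:SF (symbol_ref "*.LC1"))))

   halving the constant-pool entry compared with a plain DFmode load.  */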
3201 /* Pushing data onto the stack. */
3203 /* Push a block of length SIZE (perhaps variable)
3204 and return an rtx to address the beginning of the block.
3205 Note that it is not possible for the value returned to be a QUEUED.
3206 The value may be virtual_outgoing_args_rtx.
3208 EXTRA is the number of bytes of padding to push in addition to SIZE.
3209 BELOW nonzero means this padding comes at low addresses;
3210 otherwise, the padding comes at high addresses. */
3213 push_block (rtx size, int extra, int below)
3217 size = convert_modes (Pmode, ptr_mode, size, 1);
3218 if (CONSTANT_P (size))
3219 anti_adjust_stack (plus_constant (size, extra));
3220 else if (REG_P (size) && extra == 0)
3221 anti_adjust_stack (size);
3224 temp = copy_to_mode_reg (Pmode, size);
3226 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3227 temp, 0, OPTAB_LIB_WIDEN);
3228 anti_adjust_stack (temp);
3231 #ifndef STACK_GROWS_DOWNWARD
3237 temp = virtual_outgoing_args_rtx;
3238 if (extra != 0 && below)
3239 temp = plus_constant (temp, extra);
3243 if (GET_CODE (size) == CONST_INT)
3244 temp = plus_constant (virtual_outgoing_args_rtx,
3245 -INTVAL (size) - (below ? 0 : extra));
3246 else if (extra != 0 && !below)
3247 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3248 negate_rtx (Pmode, plus_constant (size, extra)));
3250 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3251 negate_rtx (Pmode, size));
3254 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
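/* Illustrative worked example (not part of the original source): with
   a constant size on a downward-growing stack,

     push_block (GEN_INT (32), 0, 0)

   anti-adjusts the stack by 32 bytes and, per the arithmetic above,
   returns an address 32 bytes below virtual_outgoing_args_rtx.  */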
3257 #ifdef PUSH_ROUNDING
3259 /* Emit single push insn. */
3262 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3265 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3267 enum insn_code icode;
3268 insn_operand_predicate_fn pred;
3270 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3271 /* If there is a push pattern, use it. Otherwise try the old way of throwing
3272 a MEM representing the push operation to the move expander. */
3273 icode = push_optab->handlers[(int) mode].insn_code;
3274 if (icode != CODE_FOR_nothing)
3276 if (((pred = insn_data[(int) icode].operand[0].predicate)
3277 && !((*pred) (x, mode))))
3278 x = force_reg (mode, x);
3279 emit_insn (GEN_FCN (icode) (x));
3282 if (GET_MODE_SIZE (mode) == rounded_size)
3283 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3284 /* If we are to pad downward, adjust the stack pointer first and
3285 then store X into the stack location using an offset. This is
3286 because emit_move_insn does not know how to pad; it does not have access to type. */
3288 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3290 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3291 HOST_WIDE_INT offset;
3293 emit_move_insn (stack_pointer_rtx,
3294 expand_binop (Pmode,
3295 #ifdef STACK_GROWS_DOWNWARD
3301 GEN_INT (rounded_size),
3302 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3304 offset = (HOST_WIDE_INT) padding_size;
3305 #ifdef STACK_GROWS_DOWNWARD
3306 if (STACK_PUSH_CODE == POST_DEC)
3307 /* We have already decremented the stack pointer, so get the previous value. */
3309 offset += (HOST_WIDE_INT) rounded_size;
3311 if (STACK_PUSH_CODE == POST_INC)
3312 /* We have already incremented the stack pointer, so get the previous value. */
3314 offset -= (HOST_WIDE_INT) rounded_size;
3316 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3320 #ifdef STACK_GROWS_DOWNWARD
3321 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3322 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3323 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3325 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3326 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3327 GEN_INT (rounded_size));
3329 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3332 dest = gen_rtx_MEM (mode, dest_addr);
3336 set_mem_attributes (dest, type, 1);
3338 if (flag_optimize_sibling_calls)
3339 /* Function incoming arguments may overlap with sibling call
3340 outgoing arguments and we cannot allow reordering of reads
3341 from function arguments with stores to outgoing arguments
3342 of sibling calls. */
3343 set_mem_alias_set (dest, 0);
3345 emit_move_insn (dest, x);
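/* Illustrative sketch (not part of the original source): on a
   STACK_GROWS_DOWNWARD target with STACK_PUSH_CODE == PRE_DEC and no
   push pattern, the simple case above (mode size equal to the rounded
   size) stores through

     (mem:SI (pre_dec:SI (reg/f:SI sp)))

   while the padded cases adjust the stack pointer explicitly and then
   store at a constant offset from it.  */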
3349 /* Generate code to push X onto the stack, assuming it has mode MODE and
3351 type TYPE. MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3353 SIZE is an rtx for the size of data to be copied (in bytes),
3354 needed only if X is BLKmode.
3356 ALIGN (in bits) is maximum alignment we can assume.
3358 If PARTIAL and REG are both nonzero, then copy that many of the first
3359 words of X into registers starting with REG, and push the rest of X.
3360 The amount of space pushed is decreased by PARTIAL words,
3361 rounded *down* to a multiple of PARM_BOUNDARY.
3362 REG must be a hard register in this case.
3363 If REG is zero but PARTIAL is not, take all other actions for an
3364 argument partially in registers, but do not actually load any registers.
3367 EXTRA is the amount in bytes of extra space to leave next to this arg.
3368 This is ignored if an argument block has already been allocated.
3370 On a machine that lacks real push insns, ARGS_ADDR is the address of
3371 the bottom of the argument block for this call. We use indexing off there
3372 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3373 argument block has not been preallocated.
3375 ARGS_SO_FAR is the size of args previously pushed for this call.
3377 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3378 for arguments passed in registers. If nonzero, it will be the number
3379 of bytes required. */
3382 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3383 unsigned int align, int partial, rtx reg, int extra,
3384 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3388 enum direction stack_direction
3389 #ifdef STACK_GROWS_DOWNWARD
3395 /* Decide where to pad the argument: `downward' for below,
3396 `upward' for above, or `none' for don't pad it.
3397 Default is below for small data on big-endian machines; else above. */
3398 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3400 /* Invert direction if stack is post-decrement.
3402 if (STACK_PUSH_CODE == POST_DEC)
3403 if (where_pad != none)
3404 where_pad = (where_pad == downward ? upward : downward);
3406 xinner = x = protect_from_queue (x, 0);
3408 if (mode == BLKmode)
3410 /* Copy a block into the stack, entirely or partially. */
3413 int used = partial * UNITS_PER_WORD;
3417 if (reg && GET_CODE (reg) == PARALLEL)
3419 /* Use the size of the elt to compute offset. */
3420 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3421 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3422 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3425 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3432 /* USED is now the # of bytes we need not copy to the stack
3433 because registers will take care of them. */
3436 xinner = adjust_address (xinner, BLKmode, used);
3438 /* If the partial register-part of the arg counts in its stack size,
3439 skip the part of stack space corresponding to the registers.
3440 Otherwise, start copying to the beginning of the stack space,
3441 by setting SKIP to 0. */
3442 skip = (reg_parm_stack_space == 0) ? 0 : used;
3444 #ifdef PUSH_ROUNDING
3445 /* Do it with several push insns if that doesn't take lots of insns
3446 and if there is no difficulty with push insns that skip bytes
3447 on the stack for alignment purposes. */
3450 && GET_CODE (size) == CONST_INT
3452 && MEM_ALIGN (xinner) >= align
3453 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3454 /* Here we avoid the case of a structure whose weak alignment
3455 forces many pushes of a small amount of data,
3456 and such small pushes do rounding that causes trouble. */
3457 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3458 || align >= BIGGEST_ALIGNMENT
3459 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3460 == (align / BITS_PER_UNIT)))
3461 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3463 /* Push padding now if padding above and stack grows down,
3464 or if padding below and stack grows up.
3465 But if space already allocated, this has already been done. */
3466 if (extra && args_addr == 0
3467 && where_pad != none && where_pad != stack_direction)
3468 anti_adjust_stack (GEN_INT (extra));
3470 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3473 #endif /* PUSH_ROUNDING */
3477 /* Otherwise make space on the stack and copy the data
3478 to the address of that space. */
3480 /* Deduct words put into registers from the size we must copy. */
3483 if (GET_CODE (size) == CONST_INT)
3484 size = GEN_INT (INTVAL (size) - used);
3486 size = expand_binop (GET_MODE (size), sub_optab, size,
3487 GEN_INT (used), NULL_RTX, 0,
3491 /* Get the address of the stack space.
3492 In this case, we do not deal with EXTRA separately.
3493 A single stack adjust will do. */
3496 temp = push_block (size, extra, where_pad == downward);
3499 else if (GET_CODE (args_so_far) == CONST_INT)
3500 temp = memory_address (BLKmode,
3501 plus_constant (args_addr,
3502 skip + INTVAL (args_so_far)));
3504 temp = memory_address (BLKmode,
3505 plus_constant (gen_rtx_PLUS (Pmode,
3510 if (!ACCUMULATE_OUTGOING_ARGS)
3512 /* If the source is referenced relative to the stack pointer,
3513 copy it to another register to stabilize it. We do not need
3514 to do this if we know that we won't be changing sp. */
3516 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3517 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3518 temp = copy_to_reg (temp);
3521 target = gen_rtx_MEM (BLKmode, temp);
3525 set_mem_attributes (target, type, 1);
3526 /* Function incoming arguments may overlap with sibling call
3527 outgoing arguments and we cannot allow reordering of reads
3528 from function arguments with stores to outgoing arguments
3529 of sibling calls. */
3530 set_mem_alias_set (target, 0);
3533 /* ALIGN may well be better aligned than TYPE, e.g. due to
3534 PARM_BOUNDARY. Assume the caller isn't lying. */
3535 set_mem_align (target, align);
3537 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3540 else if (partial > 0)
3542 /* Scalar partly in registers. */
3544 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3547 /* # words of start of argument
3548 that we must make space for but need not store. */
3549 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3550 int args_offset = INTVAL (args_so_far);
3553 /* Push padding now if padding above and stack grows down,
3554 or if padding below and stack grows up.
3555 But if space already allocated, this has already been done. */
3556 if (extra && args_addr == 0
3557 && where_pad != none && where_pad != stack_direction)
3558 anti_adjust_stack (GEN_INT (extra));
3560 /* If we make space by pushing it, we might as well push
3561 the real data. Otherwise, we can leave OFFSET nonzero
3562 and leave the space uninitialized. */
3566 /* Now NOT_STACK gets the number of words that we don't need to
3567 allocate on the stack. */
3568 not_stack = partial - offset;
3570 /* If the partial register-part of the arg counts in its stack size,
3571 skip the part of stack space corresponding to the registers.
3572 Otherwise, start copying to the beginning of the stack space,
3573 by setting SKIP to 0. */
3574 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3576 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3577 x = validize_mem (force_const_mem (mode, x));
3579 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3580 SUBREGs of such registers are not allowed. */
3581 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3582 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3583 x = copy_to_reg (x);
3585 /* Loop over all the words allocated on the stack for this arg. */
3586 /* We can do it by words, because any scalar bigger than a word
3587 has a size a multiple of a word. */
3588 #ifndef PUSH_ARGS_REVERSED
3589 for (i = not_stack; i < size; i++)
3591 for (i = size - 1; i >= not_stack; i--)
3593 if (i >= not_stack + offset)
3594 emit_push_insn (operand_subword_force (x, i, mode),
3595 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3597 GEN_INT (args_offset + ((i - not_stack + skip)
3599 reg_parm_stack_space, alignment_pad);
3606 /* Push padding now if padding above and stack grows down,
3607 or if padding below and stack grows up.
3608 But if space already allocated, this has already been done. */
3609 if (extra && args_addr == 0
3610 && where_pad != none && where_pad != stack_direction)
3611 anti_adjust_stack (GEN_INT (extra));
3613 #ifdef PUSH_ROUNDING
3614 if (args_addr == 0 && PUSH_ARGS)
3615 emit_single_push_insn (mode, x, type);
3619 if (GET_CODE (args_so_far) == CONST_INT)
3621 = memory_address (mode,
3622 plus_constant (args_addr,
3623 INTVAL (args_so_far)));
3625 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3627 dest = gen_rtx_MEM (mode, addr);
3630 set_mem_attributes (dest, type, 1);
3631 /* Function incoming arguments may overlap with sibling call
3632 outgoing arguments and we cannot allow reordering of reads
3633 from function arguments with stores to outgoing arguments
3634 of sibling calls. */
3635 set_mem_alias_set (dest, 0);
3638 emit_move_insn (dest, x);
3642 /* If part should go in registers, copy that part
3643 into the appropriate registers. Do this now, at the end,
3644 since mem-to-mem copies above may do function calls. */
3645 if (partial > 0 && reg != 0)
3647 /* Handle calls that pass values in multiple non-contiguous locations.
3648 The Irix 6 ABI has examples of this. */
3649 if (GET_CODE (reg) == PARALLEL)
3650 emit_group_load (reg, x, type, -1);
3652 move_block_to_reg (REGNO (reg), x, partial, mode);
3655 if (extra && args_addr == 0 && where_pad == stack_direction)
3656 anti_adjust_stack (GEN_INT (extra));
3658 if (alignment_pad && args_addr == 0)
3659 anti_adjust_stack (alignment_pad);
3662 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3666 get_subtarget (rtx x)
3669 /* Only registers can be subtargets. */
3671 /* If the register is readonly, it can't be set more than once. */
3672 || RTX_UNCHANGING_P (x)
3673 /* Don't use hard regs to avoid extending their life. */
3674 || REGNO (x) < FIRST_PSEUDO_REGISTER
3675 /* Avoid subtargets inside loops,
3676 since they hide some invariant expressions. */
3677 || preserve_subexpressions_p ())
3681 /* Expand an assignment that stores the value of FROM into TO.
3682 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3683 (This may contain a QUEUED rtx;
3684 if the value is constant, this rtx is a constant.)
3685 Otherwise, the returned value is NULL_RTX. */
3688 expand_assignment (tree to, tree from, int want_value)
3693 /* Don't crash if the lhs of the assignment was erroneous. */
3695 if (TREE_CODE (to) == ERROR_MARK)
3697 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3698 return want_value ? result : NULL_RTX;
3701 /* Assignment of a structure component needs special treatment
3702 if the structure component's rtx is not simply a MEM.
3703 Assignment of an array element at a constant index, and assignment of
3704 an array element in an unaligned packed structure field, has the same problem. */
3707 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3708 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3709 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3711 enum machine_mode mode1;
3712 HOST_WIDE_INT bitsize, bitpos;
3720 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3721 &unsignedp, &volatilep);
3723 /* If we are going to use store_bit_field and extract_bit_field,
3724 make sure to_rtx will be safe for multiple use. */
3726 if (mode1 == VOIDmode && want_value)
3727 tem = stabilize_reference (tem);
3729 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3733 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3735 if (!MEM_P (to_rtx))
3738 #ifdef POINTERS_EXTEND_UNSIGNED
3739 if (GET_MODE (offset_rtx) != Pmode)
3740 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3742 if (GET_MODE (offset_rtx) != ptr_mode)
3743 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3746 /* A constant address in TO_RTX can have VOIDmode, we must not try
3747 to call force_reg for that case. Avoid that case. */
3749 && GET_MODE (to_rtx) == BLKmode
3750 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3752 && (bitpos % bitsize) == 0
3753 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3754 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3756 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3760 to_rtx = offset_address (to_rtx, offset_rtx,
3761 highest_pow2_factor_for_target (to,
3767 /* If the field is at offset zero, we could have been given the
3768 DECL_RTX of the parent struct. Don't munge it. */
3769 to_rtx = shallow_copy_rtx (to_rtx);
3771 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3774 /* Deal with volatile and readonly fields. The former is only done
3775 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3776 if (volatilep && MEM_P (to_rtx))
3778 if (to_rtx == orig_to_rtx)
3779 to_rtx = copy_rtx (to_rtx);
3780 MEM_VOLATILE_P (to_rtx) = 1;
3783 if (TREE_CODE (to) == COMPONENT_REF
3784 && TREE_READONLY (TREE_OPERAND (to, 1))
3785 /* We can't assert that a MEM won't be set more than once
3786 if the component is not addressable because another
3787 non-addressable component may be referenced by the same MEM. */
3788 && ! (MEM_P (to_rtx) && ! can_address_p (to)))
3790 if (to_rtx == orig_to_rtx)
3791 to_rtx = copy_rtx (to_rtx);
3792 RTX_UNCHANGING_P (to_rtx) = 1;
3795 if (MEM_P (to_rtx) && ! can_address_p (to))
3797 if (to_rtx == orig_to_rtx)
3798 to_rtx = copy_rtx (to_rtx);
3799 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3802 /* Disabled temporarily. GET_MODE (to_rtx) is often not the right mode. */
3804 while (0 && mode1 == VOIDmode && !want_value
3805 && bitpos + bitsize <= BITS_PER_WORD
3806 && bitsize < BITS_PER_WORD
3807 && GET_MODE_BITSIZE (GET_MODE (to_rtx)) <= BITS_PER_WORD
3808 && !TREE_SIDE_EFFECTS (to)
3809 && !TREE_THIS_VOLATILE (to))
3813 HOST_WIDE_INT count = bitpos;
3818 if (TREE_CODE (TREE_TYPE (src)) != INTEGER_TYPE
3819 || TREE_CODE_CLASS (TREE_CODE (src)) != '2')
3822 op0 = TREE_OPERAND (src, 0);
3823 op1 = TREE_OPERAND (src, 1);
3826 if (! operand_equal_p (to, op0, 0))
3829 if (BYTES_BIG_ENDIAN)
3830 count = GET_MODE_BITSIZE (GET_MODE (to_rtx)) - bitpos - bitsize;
3832 /* Special case some bitfield op= exp. */
3833 switch (TREE_CODE (src))
3840 /* For now, just optimize the case of the topmost bitfield
3841 where we don't need to do any masking and also
3842 1 bit bitfields where xor can be used.
3843 We might win by one instruction for the other bitfields
3844 too if insv/extv instructions aren't used, so that
3845 can be added later. */
3846 if (count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx))
3847 && (bitsize != 1 || TREE_CODE (op1) != INTEGER_CST))
3849 value = expand_expr (op1, NULL_RTX, VOIDmode, 0);
3850 value = protect_from_queue (value, 0);
3851 to_rtx = protect_from_queue (to_rtx, 1);
3852 binop = TREE_CODE (src) == PLUS_EXPR ? add_optab : sub_optab;
3854 && count + bitsize != GET_MODE_BITSIZE (GET_MODE (to_rtx)))
3856 value = expand_and (GET_MODE (to_rtx), value, const1_rtx,
3860 value = expand_shift (LSHIFT_EXPR, GET_MODE (to_rtx),
3861 value, build_int_2 (count, 0),
3863 result = expand_binop (GET_MODE (to_rtx), binop, to_rtx,
3864 value, to_rtx, 1, OPTAB_WIDEN);
3865 if (result != to_rtx)
3866 emit_move_insn (to_rtx, result);
3877 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3879 /* Spurious cast for HPUX compiler. */
3880 ? ((enum machine_mode)
3881 TYPE_MODE (TREE_TYPE (to)))
3883 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3885 preserve_temp_slots (result);
3889 /* If the value is meaningful, convert RESULT to the proper mode.
3890 Otherwise, return nothing. */
3891 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3892 TYPE_MODE (TREE_TYPE (from)),
3894 TYPE_UNSIGNED (TREE_TYPE (to)))
3898 /* If the rhs is a function call and its value is not an aggregate,
3899 call the function before we start to compute the lhs.
3900 This is needed for correct code for cases such as
3901 val = setjmp (buf) on machines where reference to val
3902 requires loading up part of an address in a separate insn.
3904 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3905 since it might be a promoted variable where the zero- or sign- extension
3906 needs to be done. Handling this in the normal way is safe because no
3907 computation is done before the call. */
3908 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3909 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3910 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3911 && REG_P (DECL_RTL (to))))
3916 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3918 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3920 /* Handle calls that return values in multiple non-contiguous locations.
3921 The Irix 6 ABI has examples of this. */
3922 if (GET_CODE (to_rtx) == PARALLEL)
3923 emit_group_load (to_rtx, value, TREE_TYPE (from),
3924 int_size_in_bytes (TREE_TYPE (from)));
3925 else if (GET_MODE (to_rtx) == BLKmode)
3926 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3929 if (POINTER_TYPE_P (TREE_TYPE (to)))
3930 value = convert_memory_address (GET_MODE (to_rtx), value);
3931 emit_move_insn (to_rtx, value);
3933 preserve_temp_slots (to_rtx);
3936 return want_value ? to_rtx : NULL_RTX;
3939 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3940 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3943 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3945 /* Don't move directly into a return register. */
3946 if (TREE_CODE (to) == RESULT_DECL
3947 && (REG_P (to_rtx) || GET_CODE (to_rtx) == PARALLEL))
3952 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3954 if (GET_CODE (to_rtx) == PARALLEL)
3955 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3956 int_size_in_bytes (TREE_TYPE (from)));
3958 emit_move_insn (to_rtx, temp);
3960 preserve_temp_slots (to_rtx);
3963 return want_value ? to_rtx : NULL_RTX;
3966 /* In case we are returning the contents of an object which overlaps
3967 the place the value is being stored, use a safe function when copying
3968 a value through a pointer into a structure value return block. */
3969 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3970 && current_function_returns_struct
3971 && !current_function_returns_pcc_struct)
3976 size = expr_size (from);
3977 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3979 emit_library_call (memmove_libfunc, LCT_NORMAL,
3980 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3981 XEXP (from_rtx, 0), Pmode,
3982 convert_to_mode (TYPE_MODE (sizetype),
3983 size, TYPE_UNSIGNED (sizetype)),
3984 TYPE_MODE (sizetype));
3986 preserve_temp_slots (to_rtx);
3989 return want_value ? to_rtx : NULL_RTX;
3992 /* Compute FROM and store the value in the rtx we got. */
3995 result = store_expr (from, to_rtx, want_value);
3996 preserve_temp_slots (result);
3999 return want_value ? result : NULL_RTX;
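/* Illustrative sketch (not part of the original source): this routine
   is what expression expansion ultimately reaches for an assignment
   tree, roughly as if by

     case MODIFY_EXPR:
       return expand_assignment (TREE_OPERAND (exp, 0),
				 TREE_OPERAND (exp, 1),
				 want_value);

   in the MODIFY_EXPR case of expand_expr.  */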
4002 /* Generate code for computing expression EXP,
4003 and storing the value into TARGET.
4004 TARGET may contain a QUEUED rtx.
4006 If WANT_VALUE & 1 is nonzero, return a copy of the value
4007 not in TARGET, so that we can be sure to use the proper
4008 value in a containing expression even if TARGET has something
4009 else stored in it. If possible, we copy the value through a pseudo
4010 and return that pseudo. Or, if the value is constant, we try to
4011 return the constant. In some cases, we return a pseudo
4012 copied *from* TARGET.
4014 If the mode is BLKmode then we may return TARGET itself.
4015 It turns out that in BLKmode it doesn't cause a problem,
4016 because C has no operators that could combine two different
4017 assignments into the same BLKmode object with different values
4018 with no sequence point. Will other languages need this to be more thorough?
4021 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4022 to catch quickly any cases where the caller uses the value
4023 and fails to set WANT_VALUE.
4025 If WANT_VALUE & 2 is set, this is a store into a call param on the
4026 stack, and block moves may need to be treated specially. */
4029 store_expr (tree exp, rtx target, int want_value)
4032 rtx alt_rtl = NULL_RTX;
4033 rtx mark = mark_queue ();
4034 int dont_return_target = 0;
4035 int dont_store_target = 0;
4037 if (VOID_TYPE_P (TREE_TYPE (exp)))
4039 /* C++ can generate ?: expressions with a throw expression in one
4040 branch and an rvalue in the other. Here, we resolve attempts to
4041 store the throw expression's nonexistent result. */
4044 expand_expr (exp, const0_rtx, VOIDmode, 0);
4047 if (TREE_CODE (exp) == COMPOUND_EXPR)
4049 /* Perform first part of compound expression, then assign from second part. */
4051 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4052 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4054 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4056 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4058 /* For conditional expression, get safe form of the target. Then
4059 test the condition, doing the appropriate assignment on either
4060 side. This avoids the creation of unnecessary temporaries.
4061 For non-BLKmode, it is more efficient not to do this. */
4063 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4066 target = protect_from_queue (target, 1);
4068 do_pending_stack_adjust ();
4070 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4071 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4073 emit_jump_insn (gen_jump (lab2));
4076 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4081 return want_value & 1 ? target : NULL_RTX;
4083 else if (queued_subexp_p (target))
4084 /* If target contains a postincrement, let's not risk
4085 using it as the place to generate the rhs. */
4087 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4089 /* Expand EXP into a new pseudo. */
4090 temp = gen_reg_rtx (GET_MODE (target));
4091 temp = expand_expr (exp, temp, GET_MODE (target),
4093 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4096 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4098 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4100 /* If target is volatile, ANSI requires accessing the value
4101 *from* the target, if it is accessed. So make that happen.
4102 In no case return the target itself. */
4103 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4104 dont_return_target = 1;
4106 else if ((want_value & 1) != 0
4108 && ! MEM_VOLATILE_P (target)
4109 && GET_MODE (target) != BLKmode)
4110 /* If target is in memory and caller wants value in a register instead,
4111 arrange that. Pass TARGET as target for expand_expr so that,
4112 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4113 We know expand_expr will not use the target in that case.
4114 Don't do this if TARGET is volatile because we are supposed
4115 to write it and then read it. */
4117 temp = expand_expr (exp, target, GET_MODE (target),
4118 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4119 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4121 /* If TEMP is already in the desired TARGET, only copy it from
4122 memory and don't store it there again. */
4124 || (rtx_equal_p (temp, target)
4125 && ! side_effects_p (temp) && ! side_effects_p (target)))
4126 dont_store_target = 1;
4127 temp = copy_to_reg (temp);
4129 dont_return_target = 1;
4131 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4132 /* If this is a scalar in a register that is stored in a wider mode
4133 than the declared mode, compute the result into its declared mode
4134 and then convert to the wider mode. Our value is the computed expression. */
4137 rtx inner_target = 0;
4139 /* If we don't want a value, we can do the conversion inside EXP,
4140 which will often result in some optimizations. Do the conversion
4141 in two steps: first change the signedness, if needed, then
4142 the extend. But don't do this if the type of EXP is a subtype
4143 of something else since then the conversion might involve
4144 more than just converting modes. */
4145 if ((want_value & 1) == 0
4146 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4147 && TREE_TYPE (TREE_TYPE (exp)) == 0
4148 && (!lang_hooks.reduce_bit_field_operations
4149 || (GET_MODE_PRECISION (GET_MODE (target))
4150 == TYPE_PRECISION (TREE_TYPE (exp)))))
4152 if (TYPE_UNSIGNED (TREE_TYPE (exp))
4153 != SUBREG_PROMOTED_UNSIGNED_P (target))
4154 exp = convert
4155 (lang_hooks.types.signed_or_unsigned_type
4156 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4158 exp = convert (lang_hooks.types.type_for_mode
4159 (GET_MODE (SUBREG_REG (target)),
4160 SUBREG_PROMOTED_UNSIGNED_P (target)),
4161 exp);
4163 inner_target = SUBREG_REG (target);
4166 temp = expand_expr (exp, inner_target, VOIDmode,
4167 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4169 /* If TEMP is a MEM and we want a result value, make the access
4170 now so it gets done only once. Strictly speaking, this is
4171 only necessary if the MEM is volatile, or if the address
4172 overlaps TARGET. But not performing the load twice also
4173 reduces the amount of rtl we generate and then have to CSE. */
4174 if (MEM_P (temp) && (want_value & 1) != 0)
4175 temp = copy_to_reg (temp);
4177 /* If TEMP is a VOIDmode constant, use convert_modes to make
4178 sure that we properly convert it. */
4179 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4181 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4182 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4183 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4184 GET_MODE (target), temp,
4185 SUBREG_PROMOTED_UNSIGNED_P (target));
4188 convert_move (SUBREG_REG (target), temp,
4189 SUBREG_PROMOTED_UNSIGNED_P (target));
4191 /* If we promoted a constant, change the mode back down to match
4192 target. Otherwise, the caller might get confused by a result whose
4193 mode is larger than expected. */
4195 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4197 if (GET_MODE (temp) != VOIDmode)
4199 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4200 SUBREG_PROMOTED_VAR_P (temp) = 1;
4201 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4202 SUBREG_PROMOTED_UNSIGNED_P (target));
4204 else
4205 temp = convert_modes (GET_MODE (target),
4206 GET_MODE (SUBREG_REG (target)),
4207 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4210 return want_value & 1 ? temp : NULL_RTX;
4214 temp = expand_expr_real (exp, target, GET_MODE (target),
4215 (want_value & 2
4216 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4217 &alt_rtl);
4218 /* Return TARGET if it's a specified hardware register.
4219 If TARGET is a volatile mem ref, either return TARGET
4220 or return a reg copied *from* TARGET; ANSI requires this.
4222 Otherwise, if TEMP is not TARGET, return TEMP
4223 if it is constant (for efficiency),
4224 or if we really want the correct value. */
4225 if (!(target && REG_P (target)
4226 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4227 && !(MEM_P (target) && MEM_VOLATILE_P (target))
4228 && ! rtx_equal_p (temp, target)
4229 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4230 dont_return_target = 1;
4233 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4234 the same as that of TARGET, adjust the constant. This is needed, for
4235 example, in case it is a CONST_DOUBLE and we want only a word-sized
4236 value.  */
4237 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4238 && TREE_CODE (exp) != ERROR_MARK
4239 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4240 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4241 temp, TYPE_UNSIGNED (TREE_TYPE (exp)));
4243 /* If value was not generated in the target, store it there.
4244 Convert the value to TARGET's type first if necessary and emit the
4245 pending incrementations that have been queued when expanding EXP.
4246 Note that we cannot emit the whole queue blindly because this will
4247 effectively disable the POST_INC optimization later.
4249 If TEMP and TARGET compare equal according to rtx_equal_p, but
4250 one or both of them are volatile memory refs, we have to distinguish
4251 two cases:
4252 - expand_expr has used TARGET.  In this case, we must not generate
4253 another copy.  This can be detected by TARGET being equal according
4254 to == .
4255 - expand_expr has not used TARGET - that means that the source just
4256 happens to have the same RTX form. Since temp will have been created
4257 by expand_expr, it will compare unequal according to == .
4258 We must generate a copy in this case, to reach the correct number
4259 of volatile memory references. */
4261 if ((! rtx_equal_p (temp, target)
4262 || (temp != target && (side_effects_p (temp)
4263 || side_effects_p (target))))
4264 && TREE_CODE (exp) != ERROR_MARK
4265 && ! dont_store_target
4266 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4267 but TARGET is not a valid memory reference, TEMP will differ
4268 from TARGET although it is really the same location. */
4269 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4270 /* If there's nothing to copy, don't bother. Don't call expr_size
4271 unless necessary, because some front ends' (C++) expr_size hook
4272 aborts on objects that are not supposed to be bit-copied or
4273 bit-initialized.  */
4274 && expr_size (exp) != const0_rtx)
4276 emit_insns_enqueued_after_mark (mark);
4277 target = protect_from_queue (target, 1);
4278 temp = protect_from_queue (temp, 0);
4279 if (GET_MODE (temp) != GET_MODE (target)
4280 && GET_MODE (temp) != VOIDmode)
4282 int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
4283 if (dont_return_target)
4285 /* In this case, we will return TEMP,
4286 so make sure it has the proper mode.
4287 But don't forget to store the value into TARGET. */
4288 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4289 emit_move_insn (target, temp);
4291 else
4292 convert_move (target, temp, unsignedp);
4295 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4297 /* Handle copying a string constant into an array. The string
4298 constant may be shorter than the array. So copy just the string's
4299 actual length, and clear the rest. First get the size of the data
4300 type of the string, which is actually the size of the target. */
4301 rtx size = expr_size (exp);
4303 if (GET_CODE (size) == CONST_INT
4304 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4305 emit_block_move (target, temp, size,
4306 (want_value & 2
4307 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4308 else
4310 /* Compute the size of the data to copy from the string.  */
4311 tree copy_size
4312 = size_binop (MIN_EXPR,
4313 make_tree (sizetype, size),
4314 size_int (TREE_STRING_LENGTH (exp)));
4315 rtx copy_size_rtx
4316 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4317 (want_value & 2
4318 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4319 rtx label = 0;
4321 /* Copy that much. */
4322 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4323 TYPE_UNSIGNED (sizetype));
4324 emit_block_move (target, temp, copy_size_rtx,
4325 (want_value & 2
4326 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4328 /* Figure out how much is left in TARGET that we have to clear.
4329 Do all calculations in ptr_mode. */
4330 if (GET_CODE (copy_size_rtx) == CONST_INT)
4332 size = plus_constant (size, -INTVAL (copy_size_rtx));
4333 target = adjust_address (target, BLKmode,
4334 INTVAL (copy_size_rtx));
4336 else
4338 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4339 copy_size_rtx, NULL_RTX, 0,
4340 OPTAB_LIB_WIDEN);
4342 #ifdef POINTERS_EXTEND_UNSIGNED
4343 if (GET_MODE (copy_size_rtx) != Pmode)
4344 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4345 TYPE_UNSIGNED (sizetype));
4346 #endif
4348 target = offset_address (target, copy_size_rtx,
4349 highest_pow2_factor (copy_size));
4350 label = gen_label_rtx ();
4351 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4352 GET_MODE (size), 0, label);
4355 if (size != const0_rtx)
4356 clear_storage (target, size);
4358 if (label)
4359 emit_label (label);
4362 /* Handle calls that return values in multiple non-contiguous locations.
4363 The Irix 6 ABI has examples of this. */
4364 else if (GET_CODE (target) == PARALLEL)
4365 emit_group_load (target, temp, TREE_TYPE (exp),
4366 int_size_in_bytes (TREE_TYPE (exp)));
4367 else if (GET_MODE (temp) == BLKmode)
4368 emit_block_move (target, temp, expr_size (exp),
4369 (want_value & 2
4370 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4371 else
4373 temp = force_operand (temp, target);
4374 if (temp != target)
4375 emit_move_insn (target, temp);
4379 /* If we don't want a value, return NULL_RTX. */
4380 if ((want_value & 1) == 0)
4381 return NULL_RTX;
4383 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4384 ??? The latter test doesn't seem to make sense. */
4385 else if (dont_return_target && !MEM_P (temp))
4386 return temp;
4388 /* Return TARGET itself if it is a hard register. */
4389 else if ((want_value & 1) != 0
4390 && GET_MODE (target) != BLKmode
4391 && ! (REG_P (target)
4392 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4393 return copy_to_reg (target);
4395 else
4396 return target;
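/* Editor's example (illustrative sketch, not part of GCC; all names below
   are hypothetical).  The STRING_CST branch of store_expr above behaves
   like this standalone C: a string constant shorter than its target array
   is copied up to the smaller of the two lengths, and the remaining bytes
   are cleared, mirroring emit_block_move followed by clear_storage.  */
#if 0
#include <string.h>

static void
init_char_array_example (char *target, size_t target_size,
                         const char *str, size_t str_len)
{
  /* Copy min (target_size, str_len) bytes, like the MIN_EXPR copy_size.  */
  size_t copy = target_size < str_len ? target_size : str_len;
  memcpy (target, str, copy);

  /* Zero whatever is left in the target, as clear_storage does.  */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);
}
#endif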
4399 /* Examine CTOR.  Discover how many scalar fields are set to nonzero
4400 values and place the count in *P_NZ_ELTS.  Discover how many scalar fields
4401 are set to non-constant values and place the count in *P_NC_ELTS.  */
4404 categorize_ctor_elements_1 (tree ctor, HOST_WIDE_INT *p_nz_elts,
4405 HOST_WIDE_INT *p_nc_elts)
4407 HOST_WIDE_INT nz_elts, nc_elts;
4413 for (list = CONSTRUCTOR_ELTS (ctor); list; list = TREE_CHAIN (list))
4415 tree value = TREE_VALUE (list);
4416 tree purpose = TREE_PURPOSE (list);
4420 if (TREE_CODE (purpose) == RANGE_EXPR)
4422 tree lo_index = TREE_OPERAND (purpose, 0);
4423 tree hi_index = TREE_OPERAND (purpose, 1);
4425 if (host_integerp (lo_index, 1) && host_integerp (hi_index, 1))
4426 mult = (tree_low_cst (hi_index, 1)
4427 - tree_low_cst (lo_index, 1) + 1);
4430 switch (TREE_CODE (value))
4431 case CONSTRUCTOR:
4434 HOST_WIDE_INT nz = 0, nc = 0;
4435 categorize_ctor_elements_1 (value, &nz, &nc);
4436 nz_elts += mult * nz;
4437 nc_elts += mult * nc;
4439 break;
4441 case INTEGER_CST:
4442 case REAL_CST:
4443 if (!initializer_zerop (value))
4444 nz_elts += mult;
4445 break;
4446 case COMPLEX_CST:
4447 if (!initializer_zerop (TREE_REALPART (value)))
4448 nz_elts += mult;
4449 if (!initializer_zerop (TREE_IMAGPART (value)))
4450 nz_elts += mult;
4451 break;
4453 case VECTOR_CST:
4454 tree v;
4455 for (v = TREE_VECTOR_CST_ELTS (value); v; v = TREE_CHAIN (v))
4456 if (!initializer_zerop (TREE_VALUE (v)))
4457 nz_elts += mult;
4458 break;
4461 default:
4462 nz_elts += mult;
4463 if (!initializer_constant_valid_p (value, TREE_TYPE (value)))
4464 nc_elts += mult;
4465 break;
4469 *p_nz_elts += nz_elts;
4470 *p_nc_elts += nc_elts;
4473 void
4474 categorize_ctor_elements (tree ctor, HOST_WIDE_INT *p_nz_elts,
4475 HOST_WIDE_INT *p_nc_elts)
4477 *p_nz_elts = 0;
4478 *p_nc_elts = 0;
4479 categorize_ctor_elements_1 (ctor, p_nz_elts, p_nc_elts);
4482 /* Count the number of scalars in TYPE.  Return -1 on overflow or
4483 variable-size.  */
4486 count_type_elements (tree type)
4488 const HOST_WIDE_INT max = ~((HOST_WIDE_INT)1 << (HOST_BITS_PER_WIDE_INT-1));
4489 switch (TREE_CODE (type))
4493 tree telts = array_type_nelts (type);
4494 if (telts && host_integerp (telts, 1))
4496 HOST_WIDE_INT n = tree_low_cst (telts, 1) + 1;
4497 HOST_WIDE_INT m = count_type_elements (TREE_TYPE (type));
4498 if (n == 0)
4499 return 0;
4500 else if (max / n > m)
4501 return n * m;
4503 return -1;
4508 HOST_WIDE_INT n = 0, t;
4511 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
4512 if (TREE_CODE (f) == FIELD_DECL)
4514 t = count_type_elements (TREE_TYPE (f));
4524 case QUAL_UNION_TYPE:
4526 /* Ho hum. How in the world do we guess here? Clearly it isn't
4527 right to count the fields. Guess based on the number of words. */
4528 HOST_WIDE_INT n = int_size_in_bytes (type);
4531 return n / UNITS_PER_WORD;
4538 /* ??? This is broken.  We should encode the vector width in the tree.  */
4539 return GET_MODE_NUNITS (TYPE_MODE (type));
4548 case REFERENCE_TYPE:
4549 return 1;
4557 default:
4558 abort ();
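/* Editor's example (illustrative sketch, not part of GCC; hypothetical
   names).  The ARRAY_TYPE arm above avoids overflow in the product
   N * M by checking "max / n > m" before multiplying.  Standalone: */
#if 0
#include <limits.h>

/* Return n * m, or -1 on overflow; assumes n and m are nonnegative,
   matching count_type_elements's use of -1 as its failure value.  */
static long
checked_element_count_example (long n, long m)
{
  if (n == 0)
    return 0;
  if (LONG_MAX / n > m)
    return n * m;
  return -1;
}
#endif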
4562 /* Return 1 if EXP contains mostly (at least 3/4) zeros.  */
4565 mostly_zeros_p (tree exp)
4567 if (TREE_CODE (exp) == CONSTRUCTOR)
4570 HOST_WIDE_INT nz_elts, nc_elts, elts;
4572 /* If there are no ranges of true bits, it is all zero. */
4573 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4574 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4576 categorize_ctor_elements (exp, &nz_elts, &nc_elts);
4577 elts = count_type_elements (TREE_TYPE (exp));
4579 return nz_elts < elts / 4;
4582 return initializer_zerop (exp);
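/* Editor's example (illustrative sketch, not part of GCC; hypothetical
   names).  The test "nz_elts < elts / 4" above means at least three
   quarters of the scalars are zero.  Over a plain int array: */
#if 0
static int
mostly_zeros_example (const int *elts, int n)
{
  int i, nz = 0;

  for (i = 0; i < n; i++)
    if (elts[i] != 0)
      nz++;

  return nz < n / 4;	/* Same 3/4-zeros threshold as mostly_zeros_p.  */
}
#endif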
4585 /* Helper function for store_constructor.
4586 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4587 TYPE is the type of the CONSTRUCTOR, not the element type.
4588 CLEARED is as for store_constructor.
4589 ALIAS_SET is the alias set to use for any stores.
4591 This provides a recursive shortcut back to store_constructor when it isn't
4592 necessary to go through store_field. This is so that we can pass through
4593 the cleared field to let store_constructor know that we may not have to
4594 clear a substructure if the outer structure has already been cleared. */
4597 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4598 HOST_WIDE_INT bitpos, enum machine_mode mode,
4599 tree exp, tree type, int cleared, int alias_set)
4601 if (TREE_CODE (exp) == CONSTRUCTOR
4602 /* We can only call store_constructor recursively if the size and
4603 bit position are on a byte boundary. */
4604 && bitpos % BITS_PER_UNIT == 0
4605 && (bitsize > 0 && bitsize % BITS_PER_UNIT == 0)
4606 /* If we have a nonzero bitpos for a register target, then we just
4607 let store_field do the bitfield handling. This is unlikely to
4608 generate unnecessary clear instructions anyway.  */
4609 && (bitpos == 0 || MEM_P (target)))
4613 = adjust_address (target,
4614 GET_MODE (target) == BLKmode
4616 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4617 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4620 /* Update the alias set, if required. */
4621 if (MEM_P (target) && ! MEM_KEEP_ALIAS_SET_P (target)
4622 && MEM_ALIAS_SET (target) != 0)
4624 target = copy_rtx (target);
4625 set_mem_alias_set (target, alias_set);
4628 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4631 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4635 /* Store the value of constructor EXP into the rtx TARGET.
4636 TARGET is either a REG or a MEM; we know it cannot conflict, since
4637 safe_from_p has been called.
4638 CLEARED is true if TARGET is known to have been zeroed.
4639 SIZE is the number of bytes of TARGET we are allowed to modify: this
4640 may not be the same as the size of EXP if we are assigning to a field
4641 which has been packed to exclude padding bits. */
4644 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4646 tree type = TREE_TYPE (exp);
4647 #ifdef WORD_REGISTER_OPERATIONS
4648 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4651 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4652 || TREE_CODE (type) == QUAL_UNION_TYPE)
4656 /* If size is zero or the target is already cleared, do nothing. */
4657 if (size == 0 || cleared)
4659 /* We either clear the aggregate or indicate the value is dead. */
4660 else if ((TREE_CODE (type) == UNION_TYPE
4661 || TREE_CODE (type) == QUAL_UNION_TYPE)
4662 && ! CONSTRUCTOR_ELTS (exp))
4663 /* If the constructor is empty, clear the union. */
4665 clear_storage (target, expr_size (exp));
4669 /* If we are building a static constructor into a register,
4670 set the initial value as zero so we can fold the value into
4671 a constant. But if more than one register is involved,
4672 this probably loses. */
4673 else if (REG_P (target) && TREE_STATIC (exp)
4674 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4676 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4680 /* If the constructor has fewer fields than the structure
4681 or if we are initializing the structure to mostly zeros,
4682 clear the whole structure first. Don't do this if TARGET is a
4683 register whose mode size isn't equal to SIZE since clear_storage
4684 can't handle this case. */
4685 else if (size > 0
4686 && ((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4687 || mostly_zeros_p (exp))
4688 && (!REG_P (target)
4689 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4690 == size)))
4692 rtx xtarget = target;
4694 if (readonly_fields_p (type))
4696 xtarget = copy_rtx (xtarget);
4697 RTX_UNCHANGING_P (xtarget) = 1;
4700 clear_storage (xtarget, GEN_INT (size));
4705 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4707 /* Store each element of the constructor into
4708 the corresponding field of TARGET. */
4710 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4712 tree field = TREE_PURPOSE (elt);
4713 tree value = TREE_VALUE (elt);
4714 enum machine_mode mode;
4715 HOST_WIDE_INT bitsize;
4716 HOST_WIDE_INT bitpos = 0;
4718 rtx to_rtx = target;
4720 /* Just ignore missing fields.
4721 We cleared the whole structure, above,
4722 if any fields are missing. */
4723 if (field == 0)
4724 continue;
4726 if (cleared && initializer_zerop (value))
4727 continue;
4729 if (host_integerp (DECL_SIZE (field), 1))
4730 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4731 else
4732 bitsize = -1;
4734 mode = DECL_MODE (field);
4735 if (DECL_BIT_FIELD (field))
4736 mode = VOIDmode;
4738 offset = DECL_FIELD_OFFSET (field);
4739 if (host_integerp (offset, 0)
4740 && host_integerp (bit_position (field), 0))
4742 bitpos = int_bit_position (field);
4743 offset = 0;
4745 else
4746 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4753 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (offset,
4754 make_tree (TREE_TYPE (exp),
4757 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4758 if (!MEM_P (to_rtx))
4759 abort ();
4761 #ifdef POINTERS_EXTEND_UNSIGNED
4762 if (GET_MODE (offset_rtx) != Pmode)
4763 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4764 #else
4765 if (GET_MODE (offset_rtx) != ptr_mode)
4766 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4767 #endif
4769 to_rtx = offset_address (to_rtx, offset_rtx,
4770 highest_pow2_factor (offset));
4773 if (TREE_READONLY (field))
4775 if (MEM_P (to_rtx))
4776 to_rtx = copy_rtx (to_rtx);
4778 RTX_UNCHANGING_P (to_rtx) = 1;
4781 #ifdef WORD_REGISTER_OPERATIONS
4782 /* If this initializes a field that is smaller than a word, at the
4783 start of a word, try to widen it to a full word.
4784 This special case allows us to output C++ member function
4785 initializations in a form that the optimizers can understand. */
4786 if (REG_P (target)
4787 && bitsize < BITS_PER_WORD
4788 && bitpos % BITS_PER_WORD == 0
4789 && GET_MODE_CLASS (mode) == MODE_INT
4790 && TREE_CODE (value) == INTEGER_CST
4791 && exp_size >= 0
4792 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4794 tree type = TREE_TYPE (value);
4796 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4798 type = lang_hooks.types.type_for_size
4799 (BITS_PER_WORD, TYPE_UNSIGNED (type));
4800 value = convert (type, value);
4803 if (BYTES_BIG_ENDIAN)
4804 value
4805 = fold (build (LSHIFT_EXPR, type, value,
4806 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4807 bitsize = BITS_PER_WORD;
4808 mode = word_mode;
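/* Editor's example (illustrative sketch, not part of GCC; hypothetical
   names, 32-bit word assumed).  The LSHIFT_EXPR built above widens a
   sub-word constant to a full word: on a big-endian target the field's
   bits must land in the most significant end of the word.  */
#if 0
#include <stdint.h>

/* Assumes 0 < bitsize < 32.  */
static uint32_t
widen_to_word_example (uint32_t value, int bitsize, int big_endian)
{
  return big_endian ? value << (32 - bitsize) : value;
}
#endif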
4812 if (MEM_P (to_rtx) && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4813 && DECL_NONADDRESSABLE_P (field))
4815 to_rtx = copy_rtx (to_rtx);
4816 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4819 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4820 value, type, cleared,
4821 get_alias_set (TREE_TYPE (field)));
4824 else if (TREE_CODE (type) == ARRAY_TYPE
4825 || TREE_CODE (type) == VECTOR_TYPE)
4831 tree elttype = TREE_TYPE (type);
4833 HOST_WIDE_INT minelt = 0;
4834 HOST_WIDE_INT maxelt = 0;
4838 unsigned n_elts = 0;
4840 if (TREE_CODE (type) == ARRAY_TYPE)
4841 domain = TYPE_DOMAIN (type);
4843 /* Vectors do not have domains; look up the domain of
4844 the array embedded in the debug representation type.
4845 FIXME Would probably be more efficient to treat vectors
4846 separately from arrays. */
4848 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4849 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4850 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4852 enum machine_mode mode = GET_MODE (target);
4854 icode = (int) vec_init_optab->handlers[mode].insn_code;
4855 if (icode != CODE_FOR_nothing)
4859 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4860 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4861 vector = alloca (n_elts * sizeof (rtx));
4862 for (i = 0; i < n_elts; i++)
4863 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4868 const_bounds_p = (TYPE_MIN_VALUE (domain)
4869 && TYPE_MAX_VALUE (domain)
4870 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4871 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4873 /* If we have constant bounds for the range of the type, get them. */
4874 if (const_bounds_p)
4876 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4877 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4880 /* If the constructor has fewer elements than the array,
4881 clear the whole array first.  Similarly if this is a
4882 static constructor of a non-BLKmode object.  */
4883 if (cleared || (REG_P (target) && TREE_STATIC (exp)))
4884 need_to_clear = 1;
4885 else
4887 HOST_WIDE_INT count = 0, zero_count = 0;
4888 need_to_clear = ! const_bounds_p;
4890 /* This loop is a more accurate version of the loop in
4891 mostly_zeros_p (it handles RANGE_EXPR in an index).
4892 It is also needed to check for missing elements. */
4893 for (elt = CONSTRUCTOR_ELTS (exp);
4894 elt != NULL_TREE && ! need_to_clear;
4895 elt = TREE_CHAIN (elt))
4897 tree index = TREE_PURPOSE (elt);
4898 HOST_WIDE_INT this_node_count;
4900 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4902 tree lo_index = TREE_OPERAND (index, 0);
4903 tree hi_index = TREE_OPERAND (index, 1);
4905 if (! host_integerp (lo_index, 1)
4906 || ! host_integerp (hi_index, 1))
4912 this_node_count = (tree_low_cst (hi_index, 1)
4913 - tree_low_cst (lo_index, 1) + 1);
4916 this_node_count = 1;
4918 count += this_node_count;
4919 if (mostly_zeros_p (TREE_VALUE (elt)))
4920 zero_count += this_node_count;
4923 /* Clear the entire array first if there are any missing elements,
4924 or if the incidence of zero elements is >= 75%. */
4925 if (! need_to_clear
4926 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4927 need_to_clear = 1;
4930 if (need_to_clear && size > 0 && !vector)
4934 if (REG_P (target))
4935 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4936 else
4937 clear_storage (target, GEN_INT (size));
4938 cleared = 1;
4941 else if (REG_P (target))
4942 /* Inform later passes that the old value is dead. */
4943 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4945 /* Store each element of the constructor into
4946 the corresponding element of TARGET, determined
4947 by counting the elements. */
4948 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4949 elt;
4950 elt = TREE_CHAIN (elt), i++)
4952 enum machine_mode mode;
4953 HOST_WIDE_INT bitsize;
4954 HOST_WIDE_INT bitpos;
4956 tree value = TREE_VALUE (elt);
4957 tree index = TREE_PURPOSE (elt);
4958 rtx xtarget = target;
4960 if (cleared && initializer_zerop (value))
4961 continue;
4963 unsignedp = TYPE_UNSIGNED (elttype);
4964 mode = TYPE_MODE (elttype);
4965 if (mode == BLKmode)
4966 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4967 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4968 : -1);
4969 else
4970 bitsize = GET_MODE_BITSIZE (mode);
4972 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4974 tree lo_index = TREE_OPERAND (index, 0);
4975 tree hi_index = TREE_OPERAND (index, 1);
4976 rtx index_r, pos_rtx;
4977 HOST_WIDE_INT lo, hi, count;
4983 /* If the range is constant and "small", unroll the loop. */
4984 if (const_bounds_p
4985 && host_integerp (lo_index, 0)
4986 && host_integerp (hi_index, 0)
4987 && (lo = tree_low_cst (lo_index, 0),
4988 hi = tree_low_cst (hi_index, 0),
4989 count = hi - lo + 1,
4990 (!MEM_P (target)
4991 || count <= 2
4992 || (host_integerp (TYPE_SIZE (elttype), 1)
4993 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4994 <= 40 * 8)))))
4996 lo -= minelt; hi -= minelt;
4997 for (; lo <= hi; lo++)
4999 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5001 if (MEM_P (target)
5002 && !MEM_KEEP_ALIAS_SET_P (target)
5003 && TREE_CODE (type) == ARRAY_TYPE
5004 && TYPE_NONALIASED_COMPONENT (type))
5006 target = copy_rtx (target);
5007 MEM_KEEP_ALIAS_SET_P (target) = 1;
5010 store_constructor_field
5011 (target, bitsize, bitpos, mode, value, type, cleared,
5012 get_alias_set (elttype));
5017 rtx loop_start = gen_label_rtx ();
5018 rtx loop_end = gen_label_rtx ();
5019 tree exit_cond;
5021 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5022 unsignedp = TYPE_UNSIGNED (domain);
5024 index = build_decl (VAR_DECL, NULL_TREE, domain);
5026 index_r
5027 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5028 &unsignedp, 0));
5029 SET_DECL_RTL (index, index_r);
5030 store_expr (lo_index, index_r, 0);
5032 /* Build the head of the loop. */
5033 do_pending_stack_adjust ();
5035 emit_label (loop_start);
5037 /* Assign value to element index. */
5038 position
5039 = convert (ssizetype,
5040 fold (build (MINUS_EXPR, TREE_TYPE (index),
5041 index, TYPE_MIN_VALUE (domain))));
5042 position = size_binop (MULT_EXPR, position,
5043 convert (ssizetype,
5044 TYPE_SIZE_UNIT (elttype)));
5046 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5047 xtarget = offset_address (target, pos_rtx,
5048 highest_pow2_factor (position));
5049 xtarget = adjust_address (xtarget, mode, 0);
5050 if (TREE_CODE (value) == CONSTRUCTOR)
5051 store_constructor (value, xtarget, cleared,
5052 bitsize / BITS_PER_UNIT);
5054 store_expr (value, xtarget, 0);
5056 /* Generate a conditional jump to exit the loop. */
5057 exit_cond = build (LT_EXPR, integer_type_node,
5058 index, hi_index);
5059 jumpif (exit_cond, loop_end);
5061 /* Update the loop counter, and jump to the head of the loop.  */
5063 expand_increment (build (PREINCREMENT_EXPR,
5064 TREE_TYPE (index),
5065 index, integer_one_node), 0, 0);
5066 emit_jump (loop_start);
5068 /* Build the end of the loop. */
5069 emit_label (loop_end);
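/* Editor's example (illustrative sketch, not part of GCC; hypothetical
   names).  The RTL loop built above (loop_start, exit test via jumpif,
   store, increment, jump back, loop_end) computes the same thing as
   this C loop over a RANGE_EXPR initializer: */
#if 0
static void
range_init_example (int *target, long lo, long hi, long minelt, int value)
{
  long i;

  for (i = lo; i <= hi; i++)	/* exit_cond: i <= hi, tested at the top */
    target[i - minelt] = value;	/* store_expr into the offset address    */
}
#endif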
5072 else if ((index != 0 && ! host_integerp (index, 0))
5073 || ! host_integerp (TYPE_SIZE (elttype), 1))
5080 if (index == 0)
5081 index = ssize_int (1);
5083 if (minelt)
5084 index = convert (ssizetype,
5085 fold (build (MINUS_EXPR, index,
5086 TYPE_MIN_VALUE (domain))));
5088 position = size_binop (MULT_EXPR, index,
5089 convert (ssizetype,
5090 TYPE_SIZE_UNIT (elttype)));
5091 xtarget = offset_address (target,
5092 expand_expr (position, 0, VOIDmode, 0),
5093 highest_pow2_factor (position));
5094 xtarget = adjust_address (xtarget, mode, 0);
5095 store_expr (value, xtarget, 0);
5102 pos = tree_low_cst (index, 0) - minelt;
5105 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
5110 bitpos = ((tree_low_cst (index, 0) - minelt)
5111 * tree_low_cst (TYPE_SIZE (elttype), 1));
5112 else
5113 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5115 if (MEM_P (target) && !MEM_KEEP_ALIAS_SET_P (target)
5116 && TREE_CODE (type) == ARRAY_TYPE
5117 && TYPE_NONALIASED_COMPONENT (type))
5119 target = copy_rtx (target);
5120 MEM_KEEP_ALIAS_SET_P (target) = 1;
5122 store_constructor_field (target, bitsize, bitpos, mode, value,
5123 type, cleared, get_alias_set (elttype));
5127 if (vector)
5128 emit_insn (GEN_FCN (icode) (target,
5129 gen_rtx_PARALLEL (GET_MODE (target),
5130 gen_rtvec_v (n_elts, vector))));
5134 /* Set constructor assignments. */
5135 else if (TREE_CODE (type) == SET_TYPE)
5137 tree elt = CONSTRUCTOR_ELTS (exp);
5138 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5139 tree domain = TYPE_DOMAIN (type);
5140 tree domain_min, domain_max, bitlength;
5142 /* The default implementation strategy is to extract the constant
5143 parts of the constructor, use that to initialize the target,
5144 and then "or" in whatever non-constant ranges we need in addition.
5146 If a large set is all zero or all ones, it is
5147 probably better to set it using memset.
5148 Also, if a large set has just a single range, it may also be
5149 better to first clear the whole set (using memset) and then set
5150 the bits we want.  */
5152 /* Check for all zeros. */
5153 if (elt == NULL_TREE && size > 0)
5155 if (!cleared)
5156 clear_storage (target, GEN_INT (size));
5157 return;
5160 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5161 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5162 bitlength = size_binop (PLUS_EXPR,
5163 size_diffop (domain_max, domain_min),
5164 ssize_int (1));
5166 nbits = tree_low_cst (bitlength, 1);
5168 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5169 are "complicated" (more than one range), initialize (the
5170 constant parts) by copying from a constant. */
5171 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5172 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5174 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5175 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5176 char *bit_buffer = alloca (nbits);
5177 HOST_WIDE_INT word = 0;
5178 unsigned int bit_pos = 0;
5179 unsigned int ibit = 0;
5180 unsigned int offset = 0; /* In bytes from beginning of set. */
5182 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5183 for (;;)
5185 if (bit_buffer[ibit])
5187 if (BYTES_BIG_ENDIAN)
5188 word |= (1 << (set_word_size - 1 - bit_pos));
5189 else
5190 word |= 1 << bit_pos;
5193 bit_pos++;  ibit++;
5194 if (bit_pos >= set_word_size || ibit == nbits)
5196 if (word != 0 || ! cleared)
5198 rtx datum = gen_int_mode (word, mode);
5201 /* The assumption here is that it is safe to use
5202 XEXP if the set is multi-word, but not if
5203 it's single-word. */
5204 if (MEM_P (target))
5205 to_rtx = adjust_address (target, mode, offset);
5206 else if (offset == 0)
5207 to_rtx = target;
5208 else
5209 abort ();
5210 emit_move_insn (to_rtx, datum);
5213 if (ibit == nbits)
5214 break;
5215 word = 0;
5216 bit_pos = 0;
5217 offset += set_word_size / BITS_PER_UNIT;
5222 /* Don't bother clearing storage if the set is all ones. */
5223 if (TREE_CHAIN (elt) != NULL_TREE
5224 || (TREE_PURPOSE (elt) == NULL_TREE
5225 ? nbits != 1
5226 : ( ! host_integerp (TREE_VALUE (elt), 0)
5227 || ! host_integerp (TREE_PURPOSE (elt), 0)
5228 || (tree_low_cst (TREE_VALUE (elt), 0)
5229 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5230 != (HOST_WIDE_INT) nbits))))
5231 clear_storage (target, expr_size (exp));
5233 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5235 /* Start of range of element or NULL. */
5236 tree startbit = TREE_PURPOSE (elt);
5237 /* End of range of element, or element value. */
5238 tree endbit = TREE_VALUE (elt);
5239 HOST_WIDE_INT startb, endb;
5240 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5242 bitlength_rtx = expand_expr (bitlength,
5243 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5245 /* Handle non-range tuple element like [ expr ]. */
5246 if (startbit == NULL_TREE)
5248 startbit = save_expr (endbit);
5249 endbit = startbit;
5252 startbit = convert (sizetype, startbit);
5253 endbit = convert (sizetype, endbit);
5254 if (! integer_zerop (domain_min))
5256 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5257 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5259 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5260 EXPAND_CONST_ADDRESS);
5261 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5262 EXPAND_CONST_ADDRESS);
5268 ((build_qualified_type (lang_hooks.types.type_for_mode
5269 (GET_MODE (target), 0),
5272 emit_move_insn (targetx, target);
5275 else if (MEM_P (target))
5276 targetx = target;
5277 else
5278 abort ();
5280 /* Optimization: If startbit and endbit are constants divisible
5281 by BITS_PER_UNIT, call memset instead. */
5282 if (TREE_CODE (startbit) == INTEGER_CST
5283 && TREE_CODE (endbit) == INTEGER_CST
5284 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5285 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5287 emit_library_call (memset_libfunc, LCT_NORMAL,
5288 VOIDmode, 3,
5289 plus_constant (XEXP (targetx, 0),
5290 startb / BITS_PER_UNIT),
5291 Pmode,
5292 constm1_rtx, TYPE_MODE (integer_type_node),
5293 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5294 TYPE_MODE (sizetype));
5296 else
5297 emit_library_call (setbits_libfunc, LCT_NORMAL,
5298 VOIDmode, 4, XEXP (targetx, 0),
5299 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5300 startbit_rtx, TYPE_MODE (sizetype),
5301 endbit_rtx, TYPE_MODE (sizetype));
5303 if (REG_P (target))
5304 emit_move_insn (target, targetx);
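/* Editor's example (illustrative sketch, not part of GCC; hypothetical
   names, 32-bit set words assumed).  The bit_buffer loop above packs a
   byte-per-bit buffer into set words, putting bit 0 in the most
   significant position on big-endian targets: */
#if 0
#define SET_WORD_BITS 32

static void
pack_bits_example (const char *bit_buffer, unsigned nbits,
                   unsigned long *words, int big_endian)
{
  unsigned long word = 0;
  unsigned ibit, bit_pos = 0, w = 0;

  for (ibit = 0; ibit < nbits; ibit++)
    {
      if (bit_buffer[ibit])
        word |= big_endian
                ? 1UL << (SET_WORD_BITS - 1 - bit_pos)
                : 1UL << bit_pos;
      if (++bit_pos == SET_WORD_BITS)
        {
          words[w++] = word;
          word = 0;
          bit_pos = 0;
        }
    }
  if (bit_pos != 0)
    words[w] = word;	/* Flush the final partial word.  */
}
#endif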
5312 /* Store the value of EXP (an expression tree)
5313 into a subfield of TARGET which has mode MODE and occupies
5314 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5315 If MODE is VOIDmode, it means that we are storing into a bit-field.
5317 If VALUE_MODE is VOIDmode, return nothing in particular.
5318 UNSIGNEDP is not used in this case.
5320 Otherwise, return an rtx for the value stored. This rtx
5321 has mode VALUE_MODE if that is convenient to do.
5322 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5324 TYPE is the type of the underlying object,
5326 ALIAS_SET is the alias set for the destination. This value will
5327 (in general) be different from that for TARGET, since TARGET is a
5328 reference to the containing structure. */
5331 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5332 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5333 int unsignedp, tree type, int alias_set)
5335 HOST_WIDE_INT width_mask = 0;
5337 if (TREE_CODE (exp) == ERROR_MARK)
5338 return const0_rtx;
5340 /* If we have nothing to store, do nothing unless the expression has
5341 side effects.  */
5342 if (bitsize == 0)
5343 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5344 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5345 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5347 /* If we are storing into an unaligned field of an aligned union that is
5348 in a register, we may have the mode of TARGET being an integer mode but
5349 MODE == BLKmode. In that case, get an aligned object whose size and
5350 alignment are the same as TARGET and store TARGET into it (we can avoid
5351 the store if the field being stored is the entire width of TARGET). Then
5352 call ourselves recursively to store the field into a BLKmode version of
5353 that object. Finally, load from the object into TARGET. This is not
5354 very efficient in general, but should only be slightly more expensive
5355 than the otherwise-required unaligned accesses. Perhaps this can be
5356 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5357 twice, once with emit_move_insn and once via store_field. */
5359 if (mode == BLKmode
5360 && (REG_P (target) || GET_CODE (target) == SUBREG))
5362 rtx object = assign_temp (type, 0, 1, 1);
5363 rtx blk_object = adjust_address (object, BLKmode, 0);
5365 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5366 emit_move_insn (object, target);
5368 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5371 emit_move_insn (target, object);
5373 /* We want to return the BLKmode version of the data.  */
5374 return blk_object;
5377 if (GET_CODE (target) == CONCAT)
5379 /* We're storing into a struct containing a single __complex. */
5381 if (bitpos != 0)
5382 abort ();
5383 return store_expr (exp, target, value_mode != VOIDmode);
5386 /* If the structure is in a register or if the component
5387 is a bit field, we cannot use addressing to access it.
5388 Use bit-field techniques or SUBREG to store in it. */
5390 if (mode == VOIDmode
5391 || (mode != BLKmode && ! direct_store[(int) mode]
5392 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5393 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5394 || REG_P (target)
5395 || GET_CODE (target) == SUBREG
5396 /* If the field isn't aligned enough to store as an ordinary memref,
5397 store it as a bit field. */
5398 || (mode != BLKmode
5399 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5400 || bitpos % GET_MODE_ALIGNMENT (mode))
5401 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5402 || (bitpos % BITS_PER_UNIT != 0)))
5403 /* If the RHS and field are a constant size and the size of the
5404 RHS isn't the same size as the bitfield, we must use bitfield
5405 operations.  */
5406 || (bitsize >= 0
5407 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5408 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5410 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5412 /* If BITSIZE is narrower than the size of the type of EXP
5413 we will be narrowing TEMP. Normally, what's wanted are the
5414 low-order bits.  However, if EXP's type is a record and this is a
5415 big-endian machine, we want the upper BITSIZE bits.  */
5416 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5417 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5418 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5419 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5420 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5421 - bitsize),
5422 NULL_RTX, 1);
5424 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5425 MODE.  */
5426 if (mode != VOIDmode && mode != BLKmode
5427 && mode != TYPE_MODE (TREE_TYPE (exp)))
5428 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5430 /* If the modes of TARGET and TEMP are both BLKmode, both
5431 must be in memory and BITPOS must be aligned on a byte
5432 boundary. If so, we simply do a block copy. */
5433 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5435 if (!MEM_P (target) || !MEM_P (temp)
5436 || bitpos % BITS_PER_UNIT != 0)
5437 abort ();
5439 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5440 emit_block_move (target, temp,
5441 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5442 / BITS_PER_UNIT),
5443 BLOCK_OP_NORMAL);
5445 return value_mode == VOIDmode ? const0_rtx : target;
5448 /* Store the value in the bitfield. */
5449 store_bit_field (target, bitsize, bitpos, mode, temp);
5451 if (value_mode != VOIDmode)
5453 /* The caller wants an rtx for the value.
5454 If possible, avoid refetching from the bitfield itself. */
5455 if (width_mask != 0
5456 && ! (MEM_P (target) && MEM_VOLATILE_P (target)))
5458 tree count;
5459 enum machine_mode tmode;
5461 tmode = GET_MODE (temp);
5462 if (tmode == VOIDmode)
5463 tmode = value_mode;
5465 if (unsignedp)
5466 return expand_and (tmode, temp,
5467 gen_int_mode (width_mask, tmode),
5468 NULL_RTX);
5470 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5471 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5472 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5475 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5476 NULL_RTX, value_mode, VOIDmode);
5482 rtx addr = XEXP (target, 0);
5483 rtx to_rtx = target;
5485 /* If a value is wanted, it must be the lhs;
5486 so make the address stable for multiple use. */
5488 if (value_mode != VOIDmode && !REG_P (addr)
5489 && ! CONSTANT_ADDRESS_P (addr)
5490 /* A frame-pointer reference is already stable. */
5491 && ! (GET_CODE (addr) == PLUS
5492 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5493 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5494 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5495 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5497 /* Now build a reference to just the desired component. */
5499 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5501 if (to_rtx == target)
5502 to_rtx = copy_rtx (to_rtx);
5504 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5505 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5506 set_mem_alias_set (to_rtx, alias_set);
5508 return store_expr (exp, to_rtx, value_mode != VOIDmode);
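/* Editor's example (illustrative sketch, not part of GCC; hypothetical
   names, 32-bit temp assumed).  When the caller wants the stored value
   back, the code above either masks with WIDTH_MASK (unsigned fields)
   or sign-extends with a left shift followed by an arithmetic right
   shift, exactly as the expand_and/expand_shift pair does: */
#if 0
#include <stdint.h>

/* Assumes 0 < bitsize < 32; the signed case relies on >> being an
   arithmetic shift, as on the usual two's-complement targets.  */
static int32_t
read_back_field_example (int32_t temp, int bitsize, int is_unsigned)
{
  if (is_unsigned)
    {
      uint32_t width_mask = ((uint32_t) 1 << bitsize) - 1;
      return (int32_t) ((uint32_t) temp & width_mask);
    }
  else
    {
      int count = 32 - bitsize;
      int32_t shifted = (int32_t) ((uint32_t) temp << count);
      return shifted >> count;
    }
}
#endif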
5512 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5513 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5514 codes and find the ultimate containing object, which we return.
5516 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5517 bit position, and *PUNSIGNEDP to the signedness of the field.
5518 If the position of the field is variable, we store a tree
5519 giving the variable offset (in units) in *POFFSET.
5520 This offset is in addition to the bit position.
5521 If the position is not variable, we store 0 in *POFFSET.
5523 If any of the extraction expressions is volatile,
5524 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5526 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5527 is a mode that can be used to access the field. In that case, *PBITSIZE
5530 If the field describes a variable-sized object, *PMODE is set to
5531 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5532 this case, but the address of the object can be found. */
5535 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5536 HOST_WIDE_INT *pbitpos, tree *poffset,
5537 enum machine_mode *pmode, int *punsignedp,
5541 enum machine_mode mode = VOIDmode;
5542 tree offset = size_zero_node;
5543 tree bit_offset = bitsize_zero_node;
5546 /* First get the mode, signedness, and size. We do this from just the
5547 outermost expression. */
5548 if (TREE_CODE (exp) == COMPONENT_REF)
5550 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5551 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5552 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5554 *punsignedp = DECL_UNSIGNED (TREE_OPERAND (exp, 1));
5556 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5558 size_tree = TREE_OPERAND (exp, 1);
5559 *punsignedp = BIT_FIELD_REF_UNSIGNED (exp);
5561 else
5563 mode = TYPE_MODE (TREE_TYPE (exp));
5564 *punsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
5566 if (mode == BLKmode)
5567 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5568 else
5569 *pbitsize = GET_MODE_BITSIZE (mode);
5572 if (size_tree != 0)
5574 if (! host_integerp (size_tree, 1))
5575 mode = BLKmode, *pbitsize = -1;
5576 else
5577 *pbitsize = tree_low_cst (size_tree, 1);
5580 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5581 and find the ultimate containing object. */
5582 while (1)
5584 if (TREE_CODE (exp) == BIT_FIELD_REF)
5585 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5586 else if (TREE_CODE (exp) == COMPONENT_REF)
5588 tree field = TREE_OPERAND (exp, 1);
5589 tree this_offset = component_ref_field_offset (exp);
5591 /* If this field hasn't been filled in yet, don't go
5592 past it. This should only happen when folding expressions
5593 made during type construction. */
5594 if (this_offset == 0)
5595 break;
5597 offset = size_binop (PLUS_EXPR, offset, this_offset);
5598 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5599 DECL_FIELD_BIT_OFFSET (field));
5601 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5604 else if (TREE_CODE (exp) == ARRAY_REF
5605 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5607 tree index = TREE_OPERAND (exp, 1);
5608 tree low_bound = array_ref_low_bound (exp);
5609 tree unit_size = array_ref_element_size (exp);
5611 /* We assume all arrays have sizes that are a multiple of a byte.
5612 First subtract the lower bound, if any, in the type of the
5613 index, then convert to sizetype and multiply by the size of the
5614 element.  */
5615 if (! integer_zerop (low_bound))
5616 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5617 index, low_bound));
5619 offset = size_binop (PLUS_EXPR, offset,
5620 size_binop (MULT_EXPR,
5621 convert (sizetype, index),
5622 unit_size));
5625 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5626 conversions that don't change the mode, and all view conversions
5627 except those that need to "step up" the alignment. */
5628 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5629 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5630 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5631 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5633 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5634 < BIGGEST_ALIGNMENT)
5635 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5636 || TYPE_ALIGN_OK (TREE_TYPE
5637 (TREE_OPERAND (exp, 0))))))
5638 && ! ((TREE_CODE (exp) == NOP_EXPR
5639 || TREE_CODE (exp) == CONVERT_EXPR)
5640 && (TYPE_MODE (TREE_TYPE (exp))
5641 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5642 break;
5644 /* If any reference in the chain is volatile, the effect is volatile. */
5645 if (TREE_THIS_VOLATILE (exp))
5646 *pvolatilep = 1;
5648 exp = TREE_OPERAND (exp, 0);
5651 /* If OFFSET is constant, see if we can return the whole thing as a
5652 constant bit position. Otherwise, split it up. */
5653 if (host_integerp (offset, 0)
5654 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5655 bitsize_unit_node))
5656 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5657 && host_integerp (tem, 0))
5658 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5659 else
5660 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5662 return exp;
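/* Editor's example (illustrative sketch, not part of GCC; hypothetical
   names).  For an array reference a[i] the loop above accumulates the
   offset (i - low_bound) * element_size, and the final step converts a
   constant byte offset plus the accumulated bit offset into *PBITPOS: */
#if 0
static long
array_ref_bitpos_example (long index, long low_bound,
                          long elt_size_bytes, long bit_offset)
{
  long byte_offset = (index - low_bound) * elt_size_bytes;
  return byte_offset * 8 + bit_offset;	/* BITS_PER_UNIT == 8 assumed */
}
#endif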
5666 /* Return a tree of sizetype representing the size, in bytes, of the element
5667 of EXP, an ARRAY_REF. */
5670 array_ref_element_size (tree exp)
5672 tree aligned_size = TREE_OPERAND (exp, 3);
5673 tree elmt_type = TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5675 /* If a size was specified in the ARRAY_REF, it's the size measured
5676 in alignment units of the element type. So multiply by that value. */
5677 if (aligned_size)
5678 return size_binop (MULT_EXPR, aligned_size,
5679 size_int (TYPE_ALIGN (elmt_type) / BITS_PER_UNIT));
5681 /* Otherwise, take the size from that of the element type. Substitute
5682 any PLACEHOLDER_EXPR that we have. */
5684 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (elmt_type), exp);
5687 /* Return a tree representing the lower bound of the array mentioned in
5688 EXP, an ARRAY_REF. */
5691 array_ref_low_bound (tree exp)
5693 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5695 /* If a lower bound is specified in EXP, use it. */
5696 if (TREE_OPERAND (exp, 2))
5697 return TREE_OPERAND (exp, 2);
5699 /* Otherwise, if there is a domain type and it has a lower bound, use it,
5700 substituting for a PLACEHOLDER_EXPR as needed. */
5701 if (domain_type && TYPE_MIN_VALUE (domain_type))
5702 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MIN_VALUE (domain_type), exp);
5704 /* Otherwise, return a zero of the appropriate type. */
5705 return fold_convert (TREE_TYPE (TREE_OPERAND (exp, 1)), integer_zero_node);
5708 /* Return a tree representing the upper bound of the array mentioned in
5709 EXP, an ARRAY_REF. */
5712 array_ref_up_bound (tree exp)
5714 tree domain_type = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5716 /* If there is a domain type and it has an upper bound, use it, substituting
5717 for a PLACEHOLDER_EXPR as needed. */
5718 if (domain_type && TYPE_MAX_VALUE (domain_type))
5719 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_MAX_VALUE (domain_type), exp);
5721 /* Otherwise fail.  */
5722 return NULL_TREE;
5725 /* Return a tree representing the offset, in bytes, of the field referenced
5726 by EXP. This does not include any offset in DECL_FIELD_BIT_OFFSET. */
5729 component_ref_field_offset (tree exp)
5731 tree aligned_offset = TREE_OPERAND (exp, 2);
5732 tree field = TREE_OPERAND (exp, 1);
5734 /* If an offset was specified in the COMPONENT_REF, it's the offset measured
5735 in units of DECL_OFFSET_ALIGN / BITS_PER_UNIT. So multiply by that
5736 value.  */
5737 if (aligned_offset)
5738 return size_binop (MULT_EXPR, aligned_offset,
5739 size_int (DECL_OFFSET_ALIGN (field) / BITS_PER_UNIT));
5741 /* Otherwise, take the offset from that of the field. Substitute
5742 any PLACEHOLDER_EXPR that we have. */
5744 return SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_FIELD_OFFSET (field), exp);
5747 /* Return 1 if T is an expression that get_inner_reference handles. */
5750 handled_component_p (tree t)
5752 switch (TREE_CODE (t))
5754 case COMPONENT_REF:
5755 case BIT_FIELD_REF:
5756 case ARRAY_REF:
5757 case ARRAY_RANGE_REF:
5758 case NON_LVALUE_EXPR:
5759 case VIEW_CONVERT_EXPR:
5760 return 1;
5762 /* ??? Sure they are handled, but get_inner_reference may return
5763 a different PBITSIZE, depending upon whether the expression is
5764 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5765 case NOP_EXPR:
5766 case CONVERT_EXPR:
5767 return (TYPE_MODE (TREE_TYPE (t))
5768 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5770 default:
5771 return 0;
5775 /* Given an rtx VALUE that may contain additions and multiplications, return
5776 an equivalent value that just refers to a register, memory, or constant.
5777 This is done by generating instructions to perform the arithmetic and
5778 returning a pseudo-register containing the value.
5780 The returned value may be a REG, SUBREG, MEM or constant. */
5783 force_operand (rtx value, rtx target)
5786 /* Use subtarget as the target for operand 0 of a binary operation. */
5787 rtx subtarget = get_subtarget (target);
5788 enum rtx_code code = GET_CODE (value);
5790 /* Check for subreg applied to an expression produced by loop optimizer. */
5792 && !REG_P (SUBREG_REG (value))
5793 && !MEM_P (SUBREG_REG (value)))
5795 value = simplify_gen_subreg (GET_MODE (value),
5796 force_reg (GET_MODE (SUBREG_REG (value)),
5797 force_operand (SUBREG_REG (value),
5799 GET_MODE (SUBREG_REG (value)),
5800 SUBREG_BYTE (value));
5801 code = GET_CODE (value);
5804 /* Check for a PIC address load. */
5805 if ((code == PLUS || code == MINUS)
5806 && XEXP (value, 0) == pic_offset_table_rtx
5807 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5808 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5809 || GET_CODE (XEXP (value, 1)) == CONST))
5811 if (!subtarget)
5812 subtarget = gen_reg_rtx (GET_MODE (value));
5813 emit_move_insn (subtarget, value);
5814 return subtarget;
5817 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5819 if (!target)
5820 target = gen_reg_rtx (GET_MODE (value));
5821 convert_move (target, force_operand (XEXP (value, 0), NULL),
5822 code == ZERO_EXTEND);
5823 return target;
5826 if (ARITHMETIC_P (value))
5828 op2 = XEXP (value, 1);
5829 if (!CONSTANT_P (op2) && !(REG_P (op2) && op2 != subtarget))
5830 subtarget = 0;
5831 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5833 code = PLUS;
5834 op2 = negate_rtx (GET_MODE (value), op2);
5837 /* Check for an addition with OP2 a constant integer and our first
5838 operand a PLUS of a virtual register and something else. In that
5839 case, we want to emit the sum of the virtual register and the
5840 constant first and then add the other value. This allows virtual
5841 register instantiation to simply modify the constant rather than
5842 creating another one around this addition. */
5843 if (code == PLUS && GET_CODE (op2) == CONST_INT
5844 && GET_CODE (XEXP (value, 0)) == PLUS
5845 && REG_P (XEXP (XEXP (value, 0), 0))
5846 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5847 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5849 rtx temp = expand_simple_binop (GET_MODE (value), code,
5850 XEXP (XEXP (value, 0), 0), op2,
5851 subtarget, 0, OPTAB_LIB_WIDEN);
5852 return expand_simple_binop (GET_MODE (value), code, temp,
5853 force_operand (XEXP (XEXP (value,
5855 target, 0, OPTAB_LIB_WIDEN);
5858 op1 = force_operand (XEXP (value, 0), subtarget);
5859 op2 = force_operand (op2, NULL_RTX);
5860 switch (code)
5862 case MULT:
5863 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5864 case DIV:
5865 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5866 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5867 target, 1, OPTAB_LIB_WIDEN);
5868 else
5869 return expand_divmod (0,
5870 FLOAT_MODE_P (GET_MODE (value))
5871 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5872 GET_MODE (value), op1, op2, target, 0);
5874 case MOD:
5875 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5876 target, 0);
5878 case UDIV:
5879 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5880 target, 1);
5882 case UMOD:
5883 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5884 target, 1);
5886 case ASHIFTRT:
5887 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5888 target, 0, OPTAB_LIB_WIDEN);
5890 default:
5891 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5892 target, 1, OPTAB_LIB_WIDEN);
5895 if (UNARY_P (value))
5897 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5898 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5901 #ifdef INSN_SCHEDULING
5902 /* On machines that have insn scheduling, we want all memory references to be
5903 explicit, so we need to deal with such paradoxical SUBREGs. */
5904 if (GET_CODE (value) == SUBREG && MEM_P (SUBREG_REG (value))
5905 && (GET_MODE_SIZE (GET_MODE (value))
5906 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5907 value
5908 = simplify_gen_subreg (GET_MODE (value),
5909 force_reg (GET_MODE (SUBREG_REG (value)),
5910 force_operand (SUBREG_REG (value),
5912 GET_MODE (SUBREG_REG (value)),
5913 SUBREG_BYTE (value));
5915 #endif
5917 return value;
5919 /* Subroutine of expand_expr: return nonzero iff there is no way that
5920 EXP can reference X, which is being modified. TOP_P is nonzero if this
5921 call is going to be used to determine whether we need a temporary
5922 for EXP, as opposed to a recursive call to this function.
5924 It is always safe for this routine to return zero since it merely
5925 searches for optimization opportunities. */
5928 safe_from_p (rtx x, tree exp, int top_p)
5934 /* If EXP has varying size, we MUST use a target since we currently
5935 have no way of allocating temporaries of variable size
5936 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5937 So we assume here that something at a higher level has prevented a
5938 clash. This is somewhat bogus, but the best we can do. Only
5939 do this when X is BLKmode and when we are at the top level. */
5940 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5941 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5942 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5943 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5944 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5946 && GET_MODE (x) == BLKmode)
5947 /* If X is in the outgoing argument area, it is always safe. */
5949 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5950 || (GET_CODE (XEXP (x, 0)) == PLUS
5951 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5954 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5955 find the underlying pseudo. */
5956 if (GET_CODE (x) == SUBREG)
5958 x = SUBREG_REG (x);
5959 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
5960 return 0;
5963 /* Now look at our tree code and possibly recurse. */
5964 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5966 case 'd':
5967 exp_rtl = DECL_RTL_IF_SET (exp);
5968 break;
5970 case 'c':
5971 return 1;
5973 case 'x':
5974 if (TREE_CODE (exp) == TREE_LIST)
5976 while (1)
5978 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5979 return 0;
5980 exp = TREE_CHAIN (exp);
5981 if (!exp)
5982 return 1;
5983 if (TREE_CODE (exp) != TREE_LIST)
5984 return safe_from_p (x, exp, 0);
5987 else if (TREE_CODE (exp) == ERROR_MARK)
5988 return 1;	/* An already-visited SAVE_EXPR? */
5989 else
5990 return 0;
5993 /* The only case we look at here is the DECL_INITIAL inside a
5994 DECL_EXPR.  */
5995 return (TREE_CODE (exp) != DECL_EXPR
5996 || TREE_CODE (DECL_EXPR_DECL (exp)) != VAR_DECL
5997 || !DECL_INITIAL (DECL_EXPR_DECL (exp))
5998 || safe_from_p (x, DECL_INITIAL (DECL_EXPR_DECL (exp)), 0));
6000 case '2':
6001 case '<':
6002 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6003 return 0;
6005 /* Fall through.  */
6006 case '1':
6007 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6011 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6012 the expression. If it is set, we conflict iff we are that rtx or
6013 both are in memory. Otherwise, we check all operands of the
6014 expression recursively. */
6016 switch (TREE_CODE (exp))
6018 case ADDR_EXPR:
6019 /* If the operand is static or we are static, we can't conflict.
6020 Likewise if we don't conflict with the operand at all. */
6021 if (staticp (TREE_OPERAND (exp, 0))
6022 || TREE_STATIC (exp)
6023 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6024 return 1;
6026 /* Otherwise, the only way this can conflict is if we are taking
6027 the address of a DECL and that address is part of X, which is
6028 very rare.  */
6029 exp = TREE_OPERAND (exp, 0);
6032 if (!DECL_RTL_SET_P (exp)
6033 || !MEM_P (DECL_RTL (exp)))
6034 return 0;
6036 exp_rtl = XEXP (DECL_RTL (exp), 0);
6037 break;
6039 case INDIRECT_REF:
6041 if (MEM_P (x)
6042 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6043 get_alias_set (exp)))
6044 return 0;
6045 break;
6047 case CALL_EXPR:
6048 /* Assume that the call will clobber all hard registers and
6049 all of memory.  */
6050 if ((REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
6051 || MEM_P (x))
6052 return 0;
6053 break;
6055 case WITH_CLEANUP_EXPR:
6056 case CLEANUP_POINT_EXPR:
6057 /* Lowered by gimplify.c. */
6061 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6064 /* The only operand we look at is operand 1. The rest aren't
6065 part of the expression. */
6066 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6072 /* If we have an rtx, we do not need to scan our operands. */
6076 nops = first_rtl_op (TREE_CODE (exp));
6077 for (i = 0; i < nops; i++)
6078 if (TREE_OPERAND (exp, i) != 0
6079 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6082 /* If this is a language-specific tree code, it may require
6083 special handling. */
6084 if ((unsigned int) TREE_CODE (exp)
6085 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6086 && !lang_hooks.safe_from_p (x, exp))
6090 /* If we have an rtl, find any enclosed object.  Then see if we conflict
6091 with it.  */
6092 if (exp_rtl)
6094 if (GET_CODE (exp_rtl) == SUBREG)
6096 exp_rtl = SUBREG_REG (exp_rtl);
6097 if (REG_P (exp_rtl)
6098 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6099 return 0;
6102 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
6103 are memory and they conflict. */
6104 return ! (rtx_equal_p (x, exp_rtl)
6105 || (MEM_P (x) && MEM_P (exp_rtl)
6106 && true_dependence (exp_rtl, VOIDmode, x,
6107 rtx_addr_varies_p)));
6110 /* If we reach here, it is safe. */
6114 /* Subroutine of expand_expr: return rtx if EXP is a
6115 variable or parameter; else return 0. */
6117 static rtx
6118 var_rtx (tree exp)
6120 STRIP_NOPS (exp);
6121 switch (TREE_CODE (exp))
6123 case PARM_DECL:
6124 case VAR_DECL:
6125 return DECL_RTL (exp);
6126 default:
6127 return 0;
6131 /* Return the highest power of two that EXP is known to be a multiple of.
6132 This is used in updating alignment of MEMs in array references. */
6134 static unsigned HOST_WIDE_INT
6135 highest_pow2_factor (tree exp)
6137 unsigned HOST_WIDE_INT c0, c1;
6139 switch (TREE_CODE (exp))
6142 /* We can find the lowest bit that's a one. If the low
6143 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6144 We need to handle this case since we can find it in a COND_EXPR,
6145 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6146 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6147 later ICE.  */
6148 if (TREE_CONSTANT_OVERFLOW (exp))
6149 return BIGGEST_ALIGNMENT;
6152 /* Note: tree_low_cst is intentionally not used here,
6153 we don't care about the upper bits. */
6154 c0 = TREE_INT_CST_LOW (exp);
6155 c0 &= -c0;
6156 return c0 ? c0 : BIGGEST_ALIGNMENT;
6160 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6161 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6162 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6163 return MIN (c0, c1);
6165 case MULT_EXPR:
6166 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6167 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6168 return c0 * c1;
6170 case ROUND_DIV_EXPR:  case TRUNC_DIV_EXPR:  case FLOOR_DIV_EXPR:
6171 case CEIL_DIV_EXPR:
6172 if (integer_pow2p (TREE_OPERAND (exp, 1))
6173 && host_integerp (TREE_OPERAND (exp, 1), 1))
6175 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6176 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6177 return MAX (1, c0 / c1);
6181 case NON_LVALUE_EXPR:  case NOP_EXPR:  case CONVERT_EXPR:
6182 case SAVE_EXPR:
6183 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6185 case COMPOUND_EXPR:
6186 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6188 case COND_EXPR:
6189 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6190 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6191 return MIN (c0, c1);
6193 default:
6194 break;
6197 return 1;
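/* Editor's example (illustrative sketch, not part of GCC; hypothetical
   name).  The INTEGER_CST arm isolates the lowest set bit with
   "c0 &= -c0": a constant is a multiple of exactly that power of two.  */
#if 0
static unsigned long
lowest_set_bit_example (unsigned long c)
{
  /* 24 -> 8, 40 -> 8, 7 -> 1; 0 yields 0 (the caller substitutes
     BIGGEST_ALIGNMENT, as the code above does).  */
  return c & -c;
}
#endif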
/* Similar, except that the alignment requirements of TARGET are
   taken into account.  Assume it is at least as aligned as its
   type, unless it is a COMPONENT_REF in which case the layout of
   the structure gives the alignment.  */

static unsigned HOST_WIDE_INT
highest_pow2_factor_for_target (tree target, tree exp)
{
  unsigned HOST_WIDE_INT target_align, factor;

  factor = highest_pow2_factor (exp);
  if (TREE_CODE (target) == COMPONENT_REF)
    target_align = DECL_ALIGN (TREE_OPERAND (target, 1)) / BITS_PER_UNIT;
  else
    target_align = TYPE_ALIGN (TREE_TYPE (target)) / BITS_PER_UNIT;
  return MAX (factor, target_align);
}
/* Expands variable VAR.  */

void
expand_var (tree var)
{
  if (DECL_EXTERNAL (var))
    return;

  if (TREE_STATIC (var))
    /* If this is an inlined copy of a static local variable,
       look up the original decl.  */
    var = DECL_ORIGIN (var);

  if (TREE_STATIC (var)
      ? !TREE_ASM_WRITTEN (var)
      : !DECL_RTL_SET_P (var))
    {
      if (TREE_CODE (var) == VAR_DECL && DECL_DEFER_OUTPUT (var))
	{
	  /* Prepare a mem & address for the decl.  */
	  rtx x;

	  if (TREE_STATIC (var))
	    abort ();

	  x = gen_rtx_MEM (DECL_MODE (var),
			   gen_reg_rtx (Pmode));

	  set_mem_attributes (x, var, 1);
	  SET_DECL_RTL (var, x);
	}
      else if (lang_hooks.expand_decl (var))
	;
      else if (TREE_CODE (var) == VAR_DECL && !TREE_STATIC (var))
	expand_decl (var);
      else if (TREE_CODE (var) == VAR_DECL && TREE_STATIC (var))
	rest_of_decl_compilation (var, NULL, 0, 0);
      else if (TREE_CODE (var) == TYPE_DECL
	       || TREE_CODE (var) == CONST_DECL
	       || TREE_CODE (var) == FUNCTION_DECL
	       || TREE_CODE (var) == LABEL_DECL)
	/* No expansion needed.  */;
      else
	abort ();
    }
}
/* Expands declarations of variables in list VARS.  */

static void
expand_vars (tree vars)
{
  for (; vars; vars = TREE_CHAIN (vars))
    {
      tree var = vars;

      if (DECL_EXTERNAL (var))
	continue;

      expand_var (var);
      expand_decl_init (var);
    }
}
/* Subroutine of expand_expr.  Expand the two operands of a binary
   expression EXP0 and EXP1 placing the results in OP0 and OP1.
   The value may be stored in TARGET if TARGET is nonzero.  The
   MODIFIER argument is as documented by expand_expr.  */

static void
expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
		 enum expand_modifier modifier)
{
  if (! safe_from_p (target, exp1, 1))
    target = 0;
  if (operand_equal_p (exp0, exp1, 0))
    {
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = copy_rtx (*op0);
    }
  else
    {
      /* If we need to preserve evaluation order, copy exp0 into its own
	 temporary variable so that it can't be clobbered by exp1.  */
      if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
	exp0 = save_expr (exp0);
      *op0 = expand_expr (exp0, target, VOIDmode, modifier);
      *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
    }
}
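
/* Usage sketch (illustrative only, not part of the original file):
   the binary-operator cases below call expand_operands and then
   combine the two results; SUBTARGET, MODE, TARGET and UNSIGNEDP here
   stand for whatever the surrounding context supplies.  */
#if 0
  rtx op0, op1;
  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		   subtarget, &op0, &op1, EXPAND_NORMAL);
  temp = expand_binop (mode, add_optab, op0, op1, target,
		       unsignedp, OPTAB_LIB_WIDEN);
#endif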
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
   a call parameter.  Such targets require special care as we haven't yet
   marked TARGET so that it's safe from being trashed by libcalls.  We
   don't want to use TARGET for anything but the final result;
   intermediate values must go elsewhere.  Additionally, calls to
   emit_block_move will be flagged with BLOCK_OP_CALL_PARM.

   If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
   address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
   DECL_RTL of the VAR_DECL.  *ALT_RTL is also set if EXP is a
   COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
   recursively.  */
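
/* Usage sketch (illustrative only, not part of the original file):
   most callers simply request the value in its natural mode and accept
   whatever rtx comes back.  */
#if 0
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
#endif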
static rtx expand_expr_real_1 (tree, rtx, enum machine_mode,
			       enum expand_modifier, rtx *);

rtx
expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
		  enum expand_modifier modifier, rtx *alt_rtl)
{
  int rn = -1;
  rtx ret, last = NULL;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (exp)) == ERROR_MARK)
    {
      ret = CONST0_RTX (tmode);
      return ret ? ret : const0_rtx;
    }

  if (flag_non_call_exceptions)
    {
      rn = lookup_stmt_eh_region (exp);
      /* If rn < 0, then either (1) tree-ssa not used or (2) doesn't throw.  */
      if (rn >= 0)
	last = get_last_insn ();
    }

  /* If this is an expression of some kind and it has an associated line
     number, then emit the line number before expanding the expression.

     We need to save and restore the file and line information so that
     errors discovered during expansion are emitted with the right
     information.  It would be better if the diagnostic routines
     used the file/line information embedded in the tree nodes rather
     than globals.  */
  if (cfun && EXPR_HAS_LOCATION (exp))
    {
      location_t saved_location = input_location;
      input_location = EXPR_LOCATION (exp);
      emit_line_note (input_location);

      /* Record where the insns produced belong.  */
      record_block_change (TREE_BLOCK (exp));

      ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

      input_location = saved_location;
    }
  else
    ret = expand_expr_real_1 (exp, target, tmode, modifier, alt_rtl);

  /* If using non-call exceptions, mark all insns that may trap.
     expand_call() will mark CALL_INSNs before we get to this code,
     but it doesn't handle libcalls, and these may trap.  */
  if (rn >= 0)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && (CALL_P (insn) || may_trap_p (PATTERN (insn))))
	    REG_NOTES (insn) = alloc_EXPR_LIST (REG_EH_REGION, GEN_INT (rn),
						REG_NOTES (insn));
	}
    }

  return ret;
}
static rtx
expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
		    enum expand_modifier modifier, rtx *alt_rtl)
{
  rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp;
  enum machine_mode mode;
  enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  bool reduce_bit_field = false;
#define REDUCE_BIT_FIELD(expr)	(reduce_bit_field && !ignore		  \
				 ? reduce_to_bit_field_precision ((expr), \
								  target, \
								  type)	  \
				 : (expr))
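
  /* Illustrative note (not from the original source): for a value of a
     3-bit unsigned bit-field type computed in QImode, REDUCE_BIT_FIELD
     masks the 8-bit result back to its 3 significant bits, e.g. an
     intermediate 0x2e becomes 0x06.  */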
  mode = TYPE_MODE (type);
  unsignedp = TYPE_UNSIGNED (type);
  if (lang_hooks.reduce_bit_field_operations
      && TREE_CODE (type) == INTEGER_TYPE
      && GET_MODE_PRECISION (mode) > TYPE_PRECISION (type))
    {
      /* An operation in what may be a bit-field type needs the
	 result to be reduced to the precision of the bit-field type,
	 which is narrower than that of the type's mode.  */
      reduce_bit_field = true;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
    }

  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = get_subtarget (target);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == COND_EXPR
		 || code == VIEW_CONVERT_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
	  if (MEM_P (temp))
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF || code == ARRAY_RANGE_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			    modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
	  return const0_rtx;
	}

      target = 0;
    }
  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  An exception is a CONSTRUCTOR into a multi-word
     MEM: that's much more likely to be most efficient into the MEM.
     Another is a CALL_EXPR which must return in memory.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (!REG_P (target) || REGNO (target) < FIRST_PSEUDO_REGISTER)
      && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
      && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
    target = 0;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);

	temp = label_rtx (exp);
	temp = gen_rtx_LABEL_REF (Pmode, temp);

	if (function != current_function_decl
	    && function != 0)
	  LABEL_REF_NONLOCAL_P (temp) = 1;

	temp = gen_rtx_MEM (FUNCTION_MODE, temp);
	return temp;
      }

    case PARM_DECL:
      if (!DECL_RTL_SET_P (exp))
	{
	  error ("%Jprior parameter's size depends on '%D'", exp, exp);
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */
    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0
	  && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	layout_decl (exp, 0);

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      if (context != 0 && context != current_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (MEM_P (DECL_RTL (exp))
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  lang_hooks.mark_addressable (exp);
	  if (!MEM_P (DECL_RTL (exp)))
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (MEM_P (addr))
	    addr
	      = replace_equiv_address (addr,
				       fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);

	  temp = replace_equiv_address (DECL_RTL (exp), addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (MEM_P (DECL_RTL (exp))
	       && REG_P (XEXP (DECL_RTL (exp), 0)))
	temp = validize_mem (DECL_RTL (exp));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (MEM_P (DECL_RTL (exp))
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && !REG_P (XEXP (DECL_RTL (exp), 0)))))
	{
	  if (alt_rtl)
	    *alt_rtl = DECL_RTL (exp);
	  temp = replace_equiv_address (DECL_RTL (exp),
					copy_rtx (XEXP (DECL_RTL (exp), 0)));
	}

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (MEM_P (temp) && REG_P (XEXP (temp, 0)))
	    mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));

	  return temp;
	}
      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (REG_P (DECL_RTL (exp))
	  && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp,
			       (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
	    abort ();

	  temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      temp = immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp), mode);

      /* ??? If overflow is set, fold will have done an incomplete job,
	 which can result in (plus xx (const_int 0)), which can get
	 simplified by validate_replace_rtx during virtual register
	 instantiation, which can result in unrecognizable insns.
	 Avoid this by forcing all overflows into registers.  */
      if (TREE_CONSTANT_OVERFLOW (exp)
	  && modifier != EXPAND_INITIALIZER)
	temp = force_reg (mode, temp);

      return temp;

    case VECTOR_CST:
      return const_vector_from_tree (exp);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
					   TYPE_MODE (TREE_TYPE (exp)));

    case COMPLEX_CST:
      /* Handle evaluating a complex constant in a CONCAT target.  */
      if (original_target && GET_CODE (original_target) == CONCAT)
	{
	  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
	  rtx rtarg, itarg;

	  rtarg = XEXP (original_target, 0);
	  itarg = XEXP (original_target, 1);

	  /* Move the real and imaginary parts separately.  */
	  op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
	  op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);

	  if (op0 != rtarg)
	    emit_move_insn (rtarg, op0);
	  if (op1 != itarg)
	    emit_move_insn (itarg, op1);

	  return original_target;
	}

      /* ... fall through ...  */

    case STRING_CST:
      temp = output_constant_def (exp, 1);

      /* temp contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (temp, 0))
	      || flag_force_addr))
	return replace_equiv_address (temp,
				      copy_rtx (XEXP (temp, 0)));
      return temp;
    case SAVE_EXPR:
      {
	tree val = TREE_OPERAND (exp, 0);
	rtx ret = expand_expr_real_1 (val, target, tmode, modifier, alt_rtl);

	if (TREE_CODE (val) != VAR_DECL || !DECL_ARTIFICIAL (val))
	  {
	    /* We can indeed still hit this case, typically via builtin
	       expanders calling save_expr immediately before expanding
	       something.  Assume this means that we only have to deal
	       with non-BLKmode values.  */
	    if (GET_MODE (ret) == BLKmode)
	      abort ();

	    val = build_decl (VAR_DECL, NULL, TREE_TYPE (exp));
	    DECL_ARTIFICIAL (val) = 1;
	    TREE_OPERAND (exp, 0) = val;

	    if (!CONSTANT_P (ret))
	      ret = copy_to_reg (ret);
	    SET_DECL_RTL (val, ret);
	  }

	return ret;
      }

    case UNSAVE_EXPR:
      {
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0)
	  = lang_hooks.unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

      /* These are lowered during gimplification, so we should never ever
	 see them here.  */
    case LOOP_EXPR:
    case EXIT_EXPR:
      abort ();
    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      /* Should perhaps use expand_label, but this is simpler and safer.  */
      do_pending_stack_adjust ();
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case BIND_EXPR:
      {
	tree block = BIND_EXPR_BLOCK (exp);
	int mark_ends;

	/* If we're in functions-as-trees mode, this BIND_EXPR represents
	   the block, so we need to emit NOTE_INSN_BLOCK_* notes.  */
	mark_ends = (block != NULL_TREE);
	expand_start_bindings_and_block (mark_ends ? 0 : 2, block);

	/* If VARS have not yet been expanded, expand them now.  */
	expand_vars (BIND_EXPR_VARS (exp));

	/* TARGET was clobbered early in this function.  The correct
	   indicator of whether or not we need the value of this
	   expression is the IGNORE variable.  */
	temp = expand_expr (BIND_EXPR_BODY (exp),
			    ignore ? const0_rtx : target,
			    VOIDmode, modifier);

	expand_end_bindings (BIND_EXPR_VARS (exp), mark_ends, 0);

	return temp;
      }
    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);

	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.

	 FIXME: Avoid trying to fill vector constructors piecemeal.
	 Output them with output_constant_def below unless we're sure
	 they're zeros.  This should go away when vector initializers
	 are treated like VECTOR_CST instead of arrays.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (host_integerp (TYPE_SIZE_UNIT (type), 1)
			&& (! MOVE_BY_PIECES_P
			    (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
			     TYPE_ALIGN (type)))
			&& ! mostly_zeros_p (exp))))
	       || ((modifier == EXPAND_INITIALIZER
		    || modifier == EXPAND_CONST_ADDRESS)
		   && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp, 1);

	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM)
	    constructor = validize_mem (constructor);

	  return constructor;
	}
      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL
	      || modifier == EXPAND_STACK_PARM)
	    target
	      = assign_temp (build_qualified_type (type,
						   (TYPE_QUALS (type)
						    | (TREE_READONLY (exp)
						       * TYPE_QUAL_CONST))),
			     0, TREE_ADDRESSABLE (exp), 1);

	  store_constructor (exp, target, 0, int_expr_size (exp));
	  return target;
	}
    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);

	if (modifier != EXPAND_WRITE)
	  {
	    tree t;

	    t = fold_read_from_constant_string (exp);
	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);
	temp = gen_rtx_MEM (mode, op0);
	set_mem_attributes (temp, exp, 0);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_WRITE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }
    case ARRAY_REF:

#ifdef ENABLE_CHECKING
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();
#endif

      {
	tree array = TREE_OPERAND (exp, 0);
	tree low_bound = array_ref_low_bound (exp);
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));

	/* Optimize the special-case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion,  (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY)
	  {
	    tree t = fold_read_from_constant_string (exp);

	    if (t)
	      return expand_expr (t, target, tmode, modifier);
	  }

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (modifier != EXPAND_CONST_ADDRESS
	    && modifier != EXPAND_INITIALIZER
	    && modifier != EXPAND_MEMORY
	    && TREE_CODE (array) == CONSTRUCTOR
	    && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;
	    HOST_WIDE_INT i;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
				  modifier);
	  }

	else if (optimize >= 1
		 && modifier != EXPAND_CONST_ADDRESS
		 && modifier != EXPAND_INITIALIZER
		 && modifier != EXPAND_MEMORY
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
		 && targetm.binds_local_p (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  {
		    tree type = TREE_TYPE (TREE_TYPE (init));
		    enum machine_mode mode = TYPE_MODE (type);

		    if (GET_MODE_CLASS (mode) == MODE_INT
			&& GET_MODE_SIZE (mode) == 1)
		      return gen_int_mode (TREE_STRING_POINTER (init)
					   [TREE_INT_CST_LOW (index)], mode);
		  }
	      }
	  }
      }
      goto normal_inner_ref;
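
      /* Illustrative note (not from the original source): given
	 static const char s[] = "bar"; a constant reference such as
	 s[2] is folded right here to the immediate character 'r' via
	 gen_int_mode, so no memory load is emitted for it.  */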
    case COMPONENT_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    && modifier == EXPAND_STACK_PARM)
		  target = 0;
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
		    enum machine_mode imode
		      = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));

		    if (TYPE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (imode, op0, op1, target);
		      }
		    else
		      {
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
      goto normal_inner_ref;
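
      /* Worked example (illustrative only, not from the original
	 source): extracting a signed 5-bit field from an SImode
	 constructor value uses the shift pair above, i.e.
	 (x << 27) >> 27 with an arithmetic right shift, which
	 replicates the field's sign bit through the upper 27 bits.  */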
    case ARRAY_RANGE_REF:
    normal_inner_ref:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	rtx orig_op0;

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to suffice.  This occurs in unchecked conversion in Ada.  */
	orig_op0 = op0
	  = expand_expr (tem,
			 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			  && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
			      != INTEGER_CST)
			  && modifier != EXPAND_STACK_PARM
			  ? target : NULL_RTX),
			 VOIDmode,
			 (modifier == EXPAND_INITIALIZER
			  || modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_STACK_PARM)
			 ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0; otherwise put it into
	   memory.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	/* Otherwise, if this object is not in memory and we either have an
	   offset or a BLKmode result, put it there.  This case can't occur in
	   C, but can in Ada if we have unchecked conversion of an expression
	   from a scalar type to an array or record type or for an
	   ARRAY_RANGE_REF whose type is BLKmode.  */
	else if (!MEM_P (op0)
		 && (offset != 0
		     || (code == ARRAY_RANGE_REF && mode == BLKmode)))
	  {
	    tree nt = build_qualified_type (TREE_TYPE (tem),
					    (TYPE_QUALS (TREE_TYPE (tem))
					     | TYPE_QUAL_CONST));
	    rtx memloc = assign_temp (nt, 1, 1, 1);

	    emit_move_insn (memloc, op0);
	    op0 = memloc;
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
					  EXPAND_SUM);

	    if (!MEM_P (op0))
	      abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	    if (GET_MODE (offset_rtx) != Pmode)
	      offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	    if (GET_MODE (offset_rtx) != ptr_mode)
	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	    if (GET_MODE (op0) == BLKmode
		/* A constant address in OP0 can have VOIDmode, we must
		   not try to call force_reg in that case.  */
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
	      {
		op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos = 0;
	      }

	    op0 = offset_address (op0, offset_rtx,
				  highest_pow2_factor (offset));
	  }

	/* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
	   record its alignment as BIGGEST_ALIGNMENT.  */
	if (MEM_P (op0) && bitpos == 0 && offset != 0
	    && is_aligning_offset (offset, tem))
	  set_mem_align (op0, BIGGEST_ALIGNMENT);

	/* Don't forget about volatility even if this is a bitfield.  */
	if (MEM_P (op0) && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    if (op0 == orig_op0)
	      op0 = copy_rtx (op0);

	    MEM_VOLATILE_P (op0) = 1;
	  }
	/* The following code doesn't handle CONCAT.
	   Assume only bitpos == 0 can be used for CONCAT, due to
	   one-element arrays having the same mode as their element.  */
	if (GET_CODE (op0) == CONCAT)
	  {
	    if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
	      abort ();
	    return op0;
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.  */
	if (mode1 == VOIDmode
	    || REG_P (op0) || GET_CODE (op0) == SUBREG
	    || (mode1 != BLKmode && ! direct_load[(int) mode1]
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		&& GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
		&& modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER)
	    /* If the field isn't aligned enough to fetch as a memref,
	       fetch it as a bit field.  */
	    || (mode1 != BLKmode
		&& (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
		      || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
		      || (MEM_P (op0)
			  && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
			      || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
		     && ((modifier == EXPAND_CONST_ADDRESS
			  || modifier == EXPAND_INITIALIZER)
			 ? STRICT_ALIGNMENT
			 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
		    || (bitpos % BITS_PER_UNIT != 0)))
	    /* If the type and the field are a constant size and the
	       size of the type isn't the same size as the bitfield,
	       we must use bitfield operations.  */
	    || (bitsize >= 0
		&& (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
		    == INTEGER_CST)
		&& 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
					  bitsize)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && MEM_P (op0)
		      && MEM_P (target)
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		if (bitsize == 0)
		  return target;

		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (!MEM_P (op0)
		    || (target != 0 && !MEM_P (target))
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		emit_block_move (target,
				 adjust_address (op0, VOIDmode,
						 bitpos / BITS_PER_UNIT),
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (MEM_P (op0) && REG_P (XEXP (op0, 0)))
	      mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	    op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
				     (modifier == EXPAND_STACK_PARM
				      ? NULL_RTX : target),
				     ext_mode, ext_mode);

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    /* If the result type is BLKmode, store the data into a temporary
	       of the appropriate type, but with the mode corresponding to the
	       mode for the data we have (op0's mode).  It's tempting to make
	       this a constant type, since we know it's only being stored once,
	       but that can cause problems if we are taking the address of this
	       COMPONENT_REF because the MEM of any reference via that address
	       will have flags corresponding to the type, which will not
	       necessarily be constant.  */
	    if (mode == BLKmode)
	      {
		rtx new
		  = assign_stack_temp_for_type
		    (ext_mode, GET_MODE_BITSIZE (ext_mode), 0, type);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		set_mem_attributes (op0, exp, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
	else
	  op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);

	if (op0 == orig_op0)
	  op0 = copy_rtx (op0);

	set_mem_attributes (op0, exp, 0);
	if (REG_P (XEXP (op0, 0)))
	  mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));

	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
    case OBJ_TYPE_REF:
      return expand_expr (OBJ_TYPE_REF_EXPR (exp), target, tmode, modifier);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	{
	  if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == BUILT_IN_FRONTEND)
	    return lang_hooks.expand_expr (exp, original_target,
					   tmode, modifier, alt_rtl);
	  else
	    return expand_builtin (exp, target, subtarget, tmode, ignore);
	}

      return expand_call (exp, target, ignore);
    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
      if (TREE_OPERAND (exp, 0) == error_mark_node)
	return const0_rtx;

      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion isn't doing
	     anything except possibly changing memory attributes.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
	    {
	      rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
					modifier);

	      result = copy_rtx (result);
	      set_mem_attributes (result, exp, 0);
	      return result;
	    }

	  if (target == 0)
	    {
	      if (TYPE_MODE (type) != BLKmode)
		target = gen_reg_rtx (TYPE_MODE (type));
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (MEM_P (target))
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			adjust_address (target, TYPE_MODE (valtype), 0),
			modifier == EXPAND_STACK_PARM ? 2 : 0);

	  else if (REG_P (target))
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, type, 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return REDUCE_BIT_FIELD (op0);
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
      op0 = REDUCE_BIT_FIELD (op0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	{
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	  enum machine_mode inner_mode = TYPE_MODE (inner_type);

	  if (modifier == EXPAND_INITIALIZER)
	    return simplify_gen_subreg (mode, op0, inner_mode,
					subreg_lowpart_offset (mode,
							       inner_mode));
	  else
	    return convert_modes (mode, inner_mode, op0,
				  TYPE_UNSIGNED (inner_type));
	}

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return REDUCE_BIT_FIELD
	  (convert_to_mode (mode, op0,
			    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))));
      else
	{
	  convert_move (target, op0,
			TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
	  return target;
	}
    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are integral and within
	 a word, we can use gen_lowpart.  If neither is true, make sure the
	 operand is in memory and convert the MEM to the new mode.  */
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	       && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (!MEM_P (op0))
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (MEM_P (op0))
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, a VAR_DECL that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL
	  && (DECL_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || DECL_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return REDUCE_BIT_FIELD (op1);
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Return a PLUS if modifier says it's OK.  */
		  if (modifier == EXPAND_SUM
		      || modifier == EXPAND_INITIALIZER)
		    return simplify_gen_binary (PLUS, mode, op0, op1);
		  goto binop2;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return REDUCE_BIT_FIELD (op0);
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   subtarget, &op0, &op1, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);
      return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
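
      /* Illustrative note (not from the original source): with
	 EXPAND_SUM, an address computation like p = &arr[10] for an
	 int array can come back as the canonical address form
	 (plus (symbol_ref "arr") (const_int 40)) rather than as add
	 instructions into a pseudo register.  */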
    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
			   NULL_RTX, &op0, &op1, modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return REDUCE_BIT_FIELD (plus_constant (op0, - INTVAL (op1)));
	  else
	    return REDUCE_BIT_FIELD (gen_rtx_MINUS (mode, op0, op1));
	}

      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS(mode) == MODE_INT)
		   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  return REDUCE_BIT_FIELD (simplify_gen_binary (PLUS, mode, op0, op1));
	}

      goto binop2;
    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  if (!REG_P (op0))
	    op0 = force_operand (op0, NULL_RTX);
	  if (!REG_P (op0))
	    op0 = copy_to_mode_reg (mode, op0);

	  return REDUCE_BIT_FIELD (gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1)))));
	}

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE
				   (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_PRECISION (TREE_TYPE
				      (TREE_OPERAND
				       (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TYPE_UNSIGNED (TREE_TYPE
				  (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   == TYPE_UNSIGNED (TREE_TYPE
				     (TREE_OPERAND
				      (TREE_OPERAND (exp, 0), 0)))))))
	{
	  tree op0type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0));
	  enum machine_mode innermode = TYPE_MODE (op0type);
	  bool zextend_p = TYPE_UNSIGNED (op0type);
	  optab other_optab = zextend_p ? smul_widen_optab : umul_widen_optab;
	  this_optab = zextend_p ? umul_widen_optab : smul_widen_optab;

	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1),
				     NULL_RTX, &op0, &op1, 0);
		  else
		    expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				     NULL_RTX, &op0, &op1, 0);
		  goto binop3;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem, hipart;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  hipart = gen_highpart (innermode, temp);
		  htem = expand_mult_highpart_adjust (innermode, hipart,
						      op0, op1, hipart,
						      zextend_p);
		  if (htem != hipart)
		    emit_move_insn (hipart, htem);
		  return REDUCE_BIT_FIELD (temp);
		}
	    }
	}
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return REDUCE_BIT_FIELD (expand_mult (mode, op0, op1, target, unsignedp));
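
      /* Illustrative note (not from the original source): on a 32-bit
	 target with a 16x16->32 multiply pattern, source like

	   short a, b;
	   int p = (int) a * (int) b;

	 matches the checks above and is emitted as a single widening
	 multiply instead of two extensions plus a full multiply.  */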
    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then, if the divisor is constant, optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may manage to CSE the reciprocal,
	 saving an expensive divide.  If not, combine will rebuild the
	 original computation.  */
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       subtarget, &op0, &op1, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
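
      /* Illustrative note (not from the original source): under
	 -funsafe-math-optimizations the RDIV_EXPR case above rewrites

	   double x = a / b;    into    double x = a * (1.0 / b);

	 so that a loop-invariant 1.0/b can be CSEd and the expensive
	 divide is paid for only once.  */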
    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;
    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS(mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return REDUCE_BIT_FIELD (temp);

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* ABS_EXPR is not valid for complex arguments.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	abort ();

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TYPE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
    case MIN_EXPR:
    case MAX_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || (MEM_P (target) && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (REG_P (target)
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
		       target, &op0, &op1, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (unsignedp
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */
      if (MEM_P (target))
	target = gen_reg_rtx (mode);

      /* If op1 was placed in target, swap op0 and op1.  */
      if (target != op0 && target == op1)
	{
	  rtx tem = op0;
	  op0 = op1;
	  op1 = tem;
	}

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, unsignedp, target, op1,
					  NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, unsignedp, op1, target,
					  NULL_RTX, op0);
	}
      else
	do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				 unsignedp, mode, NULL_RTX, NULL_RTX, op0);

      emit_move_insn (target, op1);
      emit_label (op0);
      return target;
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;
      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);
      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
    case LTGT_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && REG_P (original_target)
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return temp;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && REG_P (target)
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
8142 case TRUTH_NOT_EXPR:
8143 if (modifier == EXPAND_STACK_PARM)
8145 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8146 /* The parser is careful to generate TRUTH_NOT_EXPR
8147 only with operands that are always zero or one. */
8148 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8149 target, 1, OPTAB_LIB_WIDEN);
8155 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8157 return expand_expr_real (TREE_OPERAND (exp, 1),
8158 (ignore ? const0_rtx : target),
8159 VOIDmode, modifier, alt_rtl);
8161 case STATEMENT_LIST:
8163 tree_stmt_iterator iter;
8168 for (iter = tsi_start (exp); !tsi_end_p (iter); tsi_next (&iter))
8169 expand_expr (tsi_stmt (iter), const0_rtx, VOIDmode, modifier);
8174 /* If it's void, we don't need to worry about computing a value. */
8175 if (VOID_TYPE_P (TREE_TYPE (exp)))
8177 tree pred = TREE_OPERAND (exp, 0);
8178 tree then_ = TREE_OPERAND (exp, 1);
8179 tree else_ = TREE_OPERAND (exp, 2);
8181 if (TREE_CODE (then_) == GOTO_EXPR
8182 && TREE_CODE (GOTO_DESTINATION (then_)) == LABEL_DECL)
8184 jumpif (pred, label_rtx (GOTO_DESTINATION (then_)));
8185 return expand_expr (else_, const0_rtx, VOIDmode, 0);
8187 else if (TREE_CODE (else_) == GOTO_EXPR
8188 && TREE_CODE (GOTO_DESTINATION (else_)) == LABEL_DECL)
8190 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_)));
8191 return expand_expr (then_, const0_rtx, VOIDmode, 0);
8194 /* Just use the 'if' machinery. */
8195 expand_start_cond (pred, 0);
8196 expand_expr (then_, const0_rtx, VOIDmode, 0);
8200 /* Iterate over 'else if's instead of recursing. */
8201 for (; TREE_CODE (exp) == COND_EXPR; exp = TREE_OPERAND (exp, 2))
8203 expand_start_else ();
8204 if (EXPR_HAS_LOCATION (exp))
8206 emit_line_note (EXPR_LOCATION (exp));
8207 record_block_change (TREE_BLOCK (exp));
8209 expand_elseif (TREE_OPERAND (exp, 0));
8210 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, 0);
8212 /* Don't emit the jump and label if there's no 'else' clause. */
8213 if (TREE_SIDE_EFFECTS (exp))
8215 expand_start_else ();
8216 expand_expr (exp, const0_rtx, VOIDmode, 0);
8222 /* If we would have a "singleton" (see below) were it not for a
8223 conversion in each arm, bring that conversion back out. */
8224 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8225 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8226 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8227 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8229 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8230 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8232 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8233 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8234 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8235 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8236 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8237 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8238 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8239 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8240 return expand_expr (build1 (NOP_EXPR, type,
8241 build (COND_EXPR, TREE_TYPE (iftrue),
8242 TREE_OPERAND (exp, 0),
8244 target, tmode, modifier);
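/* An illustrative instance of the rewrite above (the types are
   hypothetical): with int operands and a long result,

      cond ? (long) (a + b) : (long) a

   is re-expanded as

      (long) (cond ? a + b : a)

   so the "singleton" logic below can still see that both arms share
   the operand A.  */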
8248 /* Note that COND_EXPRs whose type is a structure or union
8249 are required to be constructed to contain assignments of
8250 a temporary variable, so that we can evaluate them here
8251 for side effect only. If type is void, we must do likewise. */
8253 /* If an arm of the branch requires a cleanup,
8254 only that cleanup is performed. */
8257 tree binary_op = 0, unary_op = 0;
8259 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8260 convert it to our mode, if necessary. */
8261 if (integer_onep (TREE_OPERAND (exp, 1))
8262 && integer_zerop (TREE_OPERAND (exp, 2))
8263 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8272 if (modifier == EXPAND_STACK_PARM)
8274 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8275 if (GET_MODE (op0) == mode)
8279 target = gen_reg_rtx (mode);
8280 convert_move (target, op0, unsignedp);
8284 /* Check for X ? A + B : A. If we have this, we can copy A to the
8285 output and conditionally add B. Similarly for unary operations.
8286 Don't do this if X has side-effects because those side effects
8287 might affect A or B and the "?" operation is a sequence point in
8288 ANSI. (operand_equal_p tests for side effects.) */
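/* For instance (purely illustrative):

      r = x ? a + b : a;

   can be emitted as "r = a; if (x) r += b;", evaluating A only once.
   This is safe only because operand_equal_p rejects operands with
   side effects; a side-effecting A would be two distinct
   evaluations.  */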
8290 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8291 && operand_equal_p (TREE_OPERAND (exp, 2),
8292 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8293 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8294 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8295 && operand_equal_p (TREE_OPERAND (exp, 1),
8296 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8297 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8298 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8299 && operand_equal_p (TREE_OPERAND (exp, 2),
8300 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8301 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8302 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8303 && operand_equal_p (TREE_OPERAND (exp, 1),
8304 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8305 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8307 /* If we are not to produce a result, we have no target. Otherwise,
8308 if a target was specified use it; it will not be used as an
8309 intermediate target unless it is safe. If no target, use a
8314 else if (modifier == EXPAND_STACK_PARM)
8315 temp = assign_temp (type, 0, 0, 1);
8316 else if (original_target
8317 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8318 || (singleton && REG_P (original_target)
8319 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8320 && original_target == var_rtx (singleton)))
8321 && GET_MODE (original_target) == mode
8322 #ifdef HAVE_conditional_move
8323 && (! can_conditionally_move_p (mode)
8324 || REG_P (original_target)
8325 || TREE_ADDRESSABLE (type))
8327 && (!MEM_P (original_target)
8328 || TREE_ADDRESSABLE (type)))
8329 temp = original_target;
8330 else if (TREE_ADDRESSABLE (type))
8333 temp = assign_temp (type, 0, 0, 1);
8335 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8336 do the test of X as a store-flag operation, do this as
8337 A + ((X != 0) << log C). Similarly for other simple binary
8338 operators. Only do for C == 1 if BRANCH_COST is low. */
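/* Concretely (an illustrative case, not code from this file):

      r = x ? a + 4 : a;

   becomes

      r = a + ((x != 0) << 2);

   one store-flag and one shift instead of a branch.  For C == 1 the
   shift is dropped entirely, which is why that case is accepted even
   when BRANCH_COST is low.  */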
8339 if (temp && singleton && binary_op
8340 && (TREE_CODE (binary_op) == PLUS_EXPR
8341 || TREE_CODE (binary_op) == MINUS_EXPR
8342 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8343 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8344 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8345 : integer_onep (TREE_OPERAND (binary_op, 1)))
8346 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8350 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8351 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8352 ? addv_optab : add_optab)
8353 : TREE_CODE (binary_op) == MINUS_EXPR
8354 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8355 ? subv_optab : sub_optab)
8356 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8357 : xor_optab);
8359 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8360 if (singleton == TREE_OPERAND (exp, 1))
8361 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8363 cond = TREE_OPERAND (exp, 0);
8365 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8367 mode, BRANCH_COST <= 1);
8369 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8370 result = expand_shift (LSHIFT_EXPR, mode, result,
8371 build_int_2 (tree_log2
8375 (safe_from_p (temp, singleton, 1)
8376 ? temp : NULL_RTX), 0);
8380 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8381 return expand_binop (mode, boptab, op1, result, temp,
8382 unsignedp, OPTAB_LIB_WIDEN);
8386 do_pending_stack_adjust ();
8388 op0 = gen_label_rtx ();
8390 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8394 /* If the target conflicts with the other operand of the
8395 binary op, we can't use it. Also, we can't use the target
8396 if it is a hard register, because evaluating the condition
8397 might clobber it. */
8399 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8401 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8402 temp = gen_reg_rtx (mode);
8403 store_expr (singleton, temp,
8404 modifier == EXPAND_STACK_PARM ? 2 : 0);
8407 expand_expr (singleton,
8408 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8409 if (singleton == TREE_OPERAND (exp, 1))
8410 jumpif (TREE_OPERAND (exp, 0), op0);
8412 jumpifnot (TREE_OPERAND (exp, 0), op0);
8414 if (binary_op && temp == 0)
8415 /* Just touch the other operand. */
8416 expand_expr (TREE_OPERAND (binary_op, 1),
8417 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8419 store_expr (build (TREE_CODE (binary_op), type,
8420 make_tree (type, temp),
8421 TREE_OPERAND (binary_op, 1)),
8422 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8424 store_expr (build1 (TREE_CODE (unary_op), type,
8425 make_tree (type, temp)),
8426 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8429 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8430 comparison operator. If we have one of these cases, set the
8431 output to A, branch on A (cse will merge these two references),
8432 then set the output to FOO. */
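/* For example (illustrative only):

      r = a > 0 ? a : -a;

   is emitted as "r = a; if (a > 0) goto done; r = -a; done:", so cse
   can merge the store of A with the comparison against zero.  */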
8434 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8435 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8436 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8437 TREE_OPERAND (exp, 1), 0)
8438 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8439 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8440 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8443 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8444 temp = gen_reg_rtx (mode);
8445 store_expr (TREE_OPERAND (exp, 1), temp,
8446 modifier == EXPAND_STACK_PARM ? 2 : 0);
8447 jumpif (TREE_OPERAND (exp, 0), op0);
8449 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8450 store_expr (TREE_OPERAND (exp, 2), temp,
8451 modifier == EXPAND_STACK_PARM ? 2 : 0);
8453 expand_expr (TREE_OPERAND (exp, 2),
8454 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8458 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8459 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8460 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8461 TREE_OPERAND (exp, 2), 0)
8462 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8463 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8464 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8467 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8468 temp = gen_reg_rtx (mode);
8469 store_expr (TREE_OPERAND (exp, 2), temp,
8470 modifier == EXPAND_STACK_PARM ? 2 : 0);
8471 jumpifnot (TREE_OPERAND (exp, 0), op0);
8473 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8474 store_expr (TREE_OPERAND (exp, 1), temp,
8475 modifier == EXPAND_STACK_PARM ? 2 : 0);
8477 expand_expr (TREE_OPERAND (exp, 1),
8478 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8483 op1 = gen_label_rtx ();
8484 jumpifnot (TREE_OPERAND (exp, 0), op0);
8486 /* One branch of the cond can be void, if it never returns. For
8487 example A ? throw : E */
8489 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8490 store_expr (TREE_OPERAND (exp, 1), temp,
8491 modifier == EXPAND_STACK_PARM ? 2 : 0);
8493 expand_expr (TREE_OPERAND (exp, 1),
8494 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8496 emit_jump_insn (gen_jump (op1));
8500 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8501 store_expr (TREE_OPERAND (exp, 2), temp,
8502 modifier == EXPAND_STACK_PARM ? 2 : 0);
8504 expand_expr (TREE_OPERAND (exp, 2),
8505 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8517 tree lhs = TREE_OPERAND (exp, 0);
8518 tree rhs = TREE_OPERAND (exp, 1);
8520 temp = expand_assignment (lhs, rhs, ! ignore);
8526 /* If lhs is complex, expand calls in rhs before computing it.
8527 That's so we don't compute a pointer and save it over a
8528 call. If lhs is simple, compute it first so we can give it
8529 as a target if the rhs is just a call. This avoids an
8530 extra temp and copy and that prevents a partial-subsumption
8531 which makes bad code. Actually we could treat
8532 component_ref's of vars like vars. */
8534 tree lhs = TREE_OPERAND (exp, 0);
8535 tree rhs = TREE_OPERAND (exp, 1);
8539 /* Check for |= or &= of a bitfield of size one into another bitfield
8540 of size 1. In this case, (unless we need the result of the
8541 assignment) we can do this more efficiently with a
8542 test followed by an assignment, if necessary.
8544 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8545 things change so we do, this code should be enhanced to
8548 && TREE_CODE (lhs) == COMPONENT_REF
8549 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8550 || TREE_CODE (rhs) == BIT_AND_EXPR)
8551 && TREE_OPERAND (rhs, 0) == lhs
8552 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8553 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8554 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8556 rtx label = gen_label_rtx ();
8558 do_jump (TREE_OPERAND (rhs, 1),
8559 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8560 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8561 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8562 (TREE_CODE (rhs) == BIT_IOR_EXPR
8564 : integer_zero_node)),
8566 do_pending_stack_adjust ();
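/* At the source level, the sequence just emitted corresponds to the
   following sketch (the struct and its fields are hypothetical):

      struct s { unsigned a : 1, b : 1; } x;

      x.a |= x.b;   =>   if (x.b) x.a = 1;
      x.a &= x.b;   =>   if (!x.b) x.a = 0;

   i.e. the bit-field on the left is only rewritten when the
   right-hand bit actually decides the result.  */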
8571 temp = expand_assignment (lhs, rhs, ! ignore);
8577 if (!TREE_OPERAND (exp, 0))
8578 expand_null_return ();
8580 expand_return (TREE_OPERAND (exp, 0));
8583 case PREINCREMENT_EXPR:
8584 case PREDECREMENT_EXPR:
8585 return REDUCE_BIT_FIELD (expand_increment (exp, 0, ignore));
8587 case POSTINCREMENT_EXPR:
8588 case POSTDECREMENT_EXPR:
8589 /* Faster to treat as pre-increment if result is not used. */
8590 return REDUCE_BIT_FIELD (expand_increment (exp, ! ignore, ignore));
8593 if (modifier == EXPAND_STACK_PARM)
8595 /* If we are taking the address of something erroneous, just
8597 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8599 /* If we are taking the address of a constant and are at the
8600 top level, we have to use output_constant_def since we can't
8601 call force_const_mem at top level. */
8603 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8604 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8606 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8609 /* We make sure to pass const0_rtx down if we came in with
8610 ignore set, to avoid doing the cleanups twice for something. */
8611 op0 = expand_expr (TREE_OPERAND (exp, 0),
8612 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8613 (modifier == EXPAND_INITIALIZER
8614 ? modifier : EXPAND_CONST_ADDRESS));
8616 /* If we are going to ignore the result, OP0 will have been set
8617 to const0_rtx, so just return it. Don't get confused and
8618 think we are taking the address of the constant. */
8622 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8623 clever and returns a REG when given a MEM. */
8624 op0 = protect_from_queue (op0, 1);
8626 /* We would like the object in memory. If it is a constant, we can
8627 have it be statically allocated into memory. For a non-constant,
8628 we need to allocate some memory and store the value into it. */
8630 if (CONSTANT_P (op0))
8631 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8633 else if (REG_P (op0) || GET_CODE (op0) == SUBREG
8634 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == PARALLEL
8635 || GET_CODE (op0) == LO_SUM)
8637 /* If this object is in a register, it can't be BLKmode. */
8638 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8639 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8641 if (GET_CODE (op0) == PARALLEL)
8642 /* Handle calls that pass values in multiple
8643 non-contiguous locations. The Irix 6 ABI has examples
8645 emit_group_store (memloc, op0, inner_type,
8646 int_size_in_bytes (inner_type));
8648 emit_move_insn (memloc, op0);
8656 mark_temp_addr_taken (op0);
8657 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8659 op0 = XEXP (op0, 0);
8660 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8661 op0 = convert_memory_address (ptr_mode, op0);
8665 /* If OP0 is not aligned as least as much as the type requires, we
8666 need to make a temporary, copy OP0 to it, and take the address of
8667 the temporary. We want to use the alignment of the type, not of
8668 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8669 the test for BLKmode means that can't happen. The test for
8670 BLKmode is because we never make mis-aligned MEMs with
8673 We don't need to do this at all if the machine doesn't have
8674 strict alignment. */
8675 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8676 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8678 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8680 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8683 if (TYPE_ALIGN_OK (inner_type))
8686 if (TREE_ADDRESSABLE (inner_type))
8688 /* We can't make a bitwise copy of this object, so fail. */
8689 error ("cannot take the address of an unaligned member");
8693 new = assign_stack_temp_for_type
8694 (TYPE_MODE (inner_type),
8695 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8696 : int_size_in_bytes (inner_type),
8697 1, build_qualified_type (inner_type,
8698 (TYPE_QUALS (inner_type)
8699 | TYPE_QUAL_CONST)));
8701 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8702 (modifier == EXPAND_STACK_PARM
8703 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8708 op0 = force_operand (XEXP (op0, 0), target);
8713 && modifier != EXPAND_CONST_ADDRESS
8714 && modifier != EXPAND_INITIALIZER
8715 && modifier != EXPAND_SUM)
8716 op0 = force_reg (Pmode, op0);
8719 && ! REG_USERVAR_P (op0))
8720 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8722 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8723 op0 = convert_memory_address (ptr_mode, op0);
8727 case ENTRY_VALUE_EXPR:
8730 /* COMPLEX type for Extended Pascal & Fortran */
8733 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8736 /* Get the rtx code of the operands. */
8737 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8738 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8741 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8745 /* Move the real (op0) and imaginary (op1) parts to their location. */
8746 emit_move_insn (gen_realpart (mode, target), op0);
8747 emit_move_insn (gen_imagpart (mode, target), op1);
8749 insns = get_insns ();
8752 /* Complex construction should appear as a single unit. */
8753 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8754 each with a separate pseudo as destination.
8755 It's not correct for flow to treat them as a unit. */
8756 if (GET_CODE (target) != CONCAT)
8757 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8765 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8766 return gen_realpart (mode, op0);
8769 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8770 return gen_imagpart (mode, op0);
8774 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8778 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8781 target = gen_reg_rtx (mode);
8785 /* Store the realpart and the negated imagpart to target. */
8786 emit_move_insn (gen_realpart (partmode, target),
8787 gen_realpart (partmode, op0));
8789 imag_t = gen_imagpart (partmode, target);
8790 temp = expand_unop (partmode,
8791 ! unsignedp && flag_trapv
8792 && (GET_MODE_CLASS(partmode) == MODE_INT)
8793 ? negv_optab : neg_optab,
8794 gen_imagpart (partmode, op0), imag_t, 0);
8796 emit_move_insn (imag_t, temp);
8798 insns = get_insns ();
8801 /* Conjugate should appear as a single unit
8802 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8803 each with a separate pseudo as destination.
8804 It's not correct for flow to treat them as a unit. */
8805 if (GET_CODE (target) != CONCAT)
8806 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8814 expand_resx_expr (exp);
8817 case TRY_CATCH_EXPR:
8819 case EH_FILTER_EXPR:
8820 case TRY_FINALLY_EXPR:
8821 /* Lowered by tree-eh.c. */
8824 case WITH_CLEANUP_EXPR:
8825 case CLEANUP_POINT_EXPR:
8827 /* Lowered by gimplify.c. */
8831 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8834 return get_exception_pointer (cfun);
8837 return get_exception_filter (cfun);
8840 /* Function descriptors are not valid except for as
8841 initialization constants, and should not be expanded. */
8845 expand_start_case (0, SWITCH_COND (exp), integer_type_node,
8847 if (SWITCH_BODY (exp))
8848 expand_expr_stmt (SWITCH_BODY (exp));
8849 if (SWITCH_LABELS (exp))
8852 tree vec = SWITCH_LABELS (exp);
8853 size_t i, n = TREE_VEC_LENGTH (vec);
8855 for (i = 0; i < n; ++i)
8857 tree elt = TREE_VEC_ELT (vec, i);
8858 tree controlling_expr_type = TREE_TYPE (SWITCH_COND (exp));
8859 tree min_value = TYPE_MIN_VALUE (controlling_expr_type);
8860 tree max_value = TYPE_MAX_VALUE (controlling_expr_type);
8862 tree case_low = CASE_LOW (elt);
8863 tree case_high = CASE_HIGH (elt) ? CASE_HIGH (elt) : case_low;
8864 if (case_low && case_high)
8866 /* Case label is less than minimum for type. */
8867 if (TREE_CODE (min_value) == INTEGER_CST
8868 && tree_int_cst_compare (case_low, min_value) < 0
8869 && tree_int_cst_compare (case_high, min_value) < 0)
8871 warning ("case label value %d is less than minimum value for type",
8872 (int) TREE_INT_CST_LOW (case_low));
8876 /* Case value is greater than maximum for type. */
8877 if (TREE_CODE (max_value) == INTEGER_CST
8878 && tree_int_cst_compare (case_low, max_value) > 0
8879 && tree_int_cst_compare (case_high, max_value) > 0)
8881 warning ("case label value %d exceeds maximum value for type",
8882 (int) TREE_INT_CST_LOW (case_high));
8886 /* Saturate lower case label value to minimum. */
8887 if (TREE_CODE (min_value) == INTEGER_CST
8888 && tree_int_cst_compare (case_high, min_value) >= 0
8889 && tree_int_cst_compare (case_low, min_value) < 0)
8891 warning ("lower value %d in case label range less than minimum value for type",
8892 (int) TREE_INT_CST_LOW (case_low));
8893 case_low = min_value;
8896 /* Saturate upper case label value to maximum. */
8897 if (TREE_CODE (max_value) == INTEGER_CST
8898 && tree_int_cst_compare (case_low, max_value) <= 0
8899 && tree_int_cst_compare (case_high, max_value) > 0)
8901 warning ("upper value %d in case label range exceeds maximum value for type",
8902 (int) TREE_INT_CST_LOW (case_high));
8903 case_high = max_value;
8907 add_case_node (case_low, case_high, CASE_LABEL (elt), &duplicate, true);
8912 expand_end_case_type (SWITCH_COND (exp), TREE_TYPE (exp));
8916 expand_label (TREE_OPERAND (exp, 0));
8919 case CASE_LABEL_EXPR:
8922 add_case_node (CASE_LOW (exp), CASE_HIGH (exp), CASE_LABEL (exp),
8930 expand_asm_expr (exp);
8934 return lang_hooks.expand_expr (exp, original_target, tmode,
8938 /* Here to do an ordinary binary operator, generating an instruction
8939 from the optab already placed in `this_optab'. */
8941 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8942 subtarget, &op0, &op1, 0);
8944 if (modifier == EXPAND_STACK_PARM)
8946 temp = expand_binop (mode, this_optab, op0, op1, target,
8947 unsignedp, OPTAB_LIB_WIDEN);
8950 return REDUCE_BIT_FIELD (temp);
8952 #undef REDUCE_BIT_FIELD
8954 /* Subroutine of above: reduce EXP to the precision of TYPE (in the
8955 signedness of TYPE), possibly returning the result in TARGET. */
8957 reduce_to_bit_field_precision (rtx exp, rtx target, tree type)
8959 HOST_WIDE_INT prec = TYPE_PRECISION (type);
8960 if (target && GET_MODE (target) != GET_MODE (exp))
8961 target = 0;
8962 if (TYPE_UNSIGNED (type))
8965 if (prec < HOST_BITS_PER_WIDE_INT)
8966 mask = immed_double_const (((unsigned HOST_WIDE_INT) 1 << prec) - 1, 0,
8969 mask = immed_double_const ((unsigned HOST_WIDE_INT) -1,
8970 ((unsigned HOST_WIDE_INT) 1
8971 << (prec - HOST_BITS_PER_WIDE_INT)) - 1,
8973 return expand_and (GET_MODE (exp), exp, mask, target);
8977 tree count = build_int_2 (GET_MODE_BITSIZE (GET_MODE (exp)) - prec, 0);
8978 exp = expand_shift (LSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
8979 return expand_shift (RSHIFT_EXPR, GET_MODE (exp), exp, count, target, 0);
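/* A worked instance of the above (values are illustrative): with a
   32-bit EXP and TYPE_PRECISION (TYPE) == 3,

      unsigned type:  result = exp & 7;
      signed type:    result = (exp << 29) >> 29;  // arithmetic shift

   so a stored bit pattern of 7 reads back as -1 in the signed case,
   matching bit-field sign extension.  */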
8983 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
8984 when applied to the address of EXP produces an address known to be
8985 aligned more than BIGGEST_ALIGNMENT. */
8988 is_aligning_offset (tree offset, tree exp)
8990 /* Strip off any conversions. */
8991 while (TREE_CODE (offset) == NON_LVALUE_EXPR
8992 || TREE_CODE (offset) == NOP_EXPR
8993 || TREE_CODE (offset) == CONVERT_EXPR)
8994 offset = TREE_OPERAND (offset, 0);
8996 /* We must now have a BIT_AND_EXPR with a constant that is one less than
8997 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
8998 if (TREE_CODE (offset) != BIT_AND_EXPR
8999 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9000 || compare_tree_int (TREE_OPERAND (offset, 1),
9001 BIGGEST_ALIGNMENT / BITS_PER_UNIT) <= 0
9002 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9005 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9006 It must be NEGATE_EXPR. Then strip any more conversions. */
9007 offset = TREE_OPERAND (offset, 0);
9008 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9009 || TREE_CODE (offset) == NOP_EXPR
9010 || TREE_CODE (offset) == CONVERT_EXPR)
9011 offset = TREE_OPERAND (offset, 0);
9013 if (TREE_CODE (offset) != NEGATE_EXPR)
9016 offset = TREE_OPERAND (offset, 0);
9017 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9018 || TREE_CODE (offset) == NOP_EXPR
9019 || TREE_CODE (offset) == CONVERT_EXPR)
9020 offset = TREE_OPERAND (offset, 0);
9022 /* This must now be the address of EXP. */
9023 return TREE_CODE (offset) == ADDR_EXPR && TREE_OPERAND (offset, 0) == exp;
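/* The shape being matched corresponds to source like the following
   (ALIGN is an illustrative power of 2 larger than BIGGEST_ALIGNMENT
   in bytes):

      offset = (- (long) &exp) & (ALIGN - 1);

   i.e. a BIT_AND_EXPR whose mask is one less than a power of 2,
   applied to the negated address of EXP.  */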
9026 /* Return the tree node if an ARG corresponds to a string constant or zero
9027 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9028 in bytes within the string that ARG is accessing. The type of the
9029 offset will be `sizetype'. */
9032 string_constant (tree arg, tree *ptr_offset)
9036 if (TREE_CODE (arg) == ADDR_EXPR
9037 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9039 *ptr_offset = size_zero_node;
9040 return TREE_OPERAND (arg, 0);
9042 if (TREE_CODE (arg) == ADDR_EXPR
9043 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
9044 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg, 0), 0)) == STRING_CST)
9046 *ptr_offset = convert (sizetype, TREE_OPERAND (TREE_OPERAND (arg, 0), 1));
9047 return TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
9049 else if (TREE_CODE (arg) == PLUS_EXPR)
9051 tree arg0 = TREE_OPERAND (arg, 0);
9052 tree arg1 = TREE_OPERAND (arg, 1);
9057 if (TREE_CODE (arg0) == ADDR_EXPR
9058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9060 *ptr_offset = convert (sizetype, arg1);
9061 return TREE_OPERAND (arg0, 0);
9063 else if (TREE_CODE (arg1) == ADDR_EXPR
9064 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9066 *ptr_offset = convert (sizetype, arg0);
9067 return TREE_OPERAND (arg1, 0);
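/* Illustrative uses (the calls are hypothetical): for the argument
   of strlen ("hello" + 2) this returns the STRING_CST "hello" with
   *PTR_OFFSET set to 2; for &"hello"[3] it returns the same
   STRING_CST with offset 3; anything else yields zero.  */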
9074 /* Expand code for a post- or pre- increment or decrement
9075 and return the RTX for the result.
9076 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9079 expand_increment (tree exp, int post, int ignore)
9083 tree incremented = TREE_OPERAND (exp, 0);
9084 optab this_optab = add_optab;
9086 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9087 int op0_is_copy = 0;
9088 int single_insn = 0;
9089 /* 1 means we can't store into OP0 directly,
9090 because it is a subreg narrower than a word,
9091 and we don't dare clobber the rest of the word. */
9094 /* Stabilize any component ref that might need to be
9095 evaluated more than once below. */
9097 || TREE_CODE (incremented) == BIT_FIELD_REF
9098 || (TREE_CODE (incremented) == COMPONENT_REF
9099 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9100 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9101 incremented = stabilize_reference (incremented);
9102 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9103 ones into save exprs so that they don't accidentally get evaluated
9104 more than once by the code below. */
9105 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9106 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9107 incremented = save_expr (incremented);
9109 /* Compute the operands as RTX.
9110 Note whether OP0 is the actual lvalue or a copy of it:
9111 I believe it is a copy iff it is a register or subreg
9112 and insns were generated in computing it. */
9114 temp = get_last_insn ();
9115 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9117 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9118 in place but instead must do sign- or zero-extension during assignment,
9119 so we copy it into a new register and let the code below use it as
9122 Note that we can safely modify this SUBREG since it is known not to be
9123 shared (it was made by the expand_expr call above). */
9125 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9128 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9132 else if (GET_CODE (op0) == SUBREG
9133 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9135 /* We cannot increment this SUBREG in place. If we are
9136 post-incrementing, get a copy of the old value. Otherwise,
9137 just mark that we cannot increment in place. */
9139 op0 = copy_to_reg (op0);
9144 op0_is_copy = ((GET_CODE (op0) == SUBREG || REG_P (op0))
9145 && temp != get_last_insn ());
9146 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9148 /* Decide whether incrementing or decrementing. */
9149 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9150 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9151 this_optab = sub_optab;
9153 /* Convert decrement by a constant into a negative increment. */
9154 if (this_optab == sub_optab
9155 && GET_CODE (op1) == CONST_INT)
9157 op1 = GEN_INT (-INTVAL (op1));
9158 this_optab = add_optab;
9161 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9162 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9164 /* For a preincrement, see if we can do this with a single instruction. */
9167 icode = (int) this_optab->handlers[(int) mode].insn_code;
9168 if (icode != (int) CODE_FOR_nothing
9169 /* Make sure that OP0 is valid for operands 0 and 1
9170 of the insn we want to queue. */
9171 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9172 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9173 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9177 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9178 then we cannot just increment OP0. We must therefore contrive to
9179 increment the original value. Then, for postincrement, we can return
9180 OP0 since it is a copy of the old value. For preincrement, expand here
9181 unless we can do it with a single insn.
9183 Likewise if storing directly into OP0 would clobber high bits
9184 we need to preserve (bad_subreg). */
9185 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9187 /* This is the easiest way to increment the value wherever it is.
9188 Problems with multiple evaluation of INCREMENTED are prevented
9189 because either (1) it is a component_ref or preincrement,
9190 in which case it was stabilized above, or (2) it is an array_ref
9191 with constant index in an array in a register, which is
9192 safe to reevaluate. */
9193 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9194 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9195 ? MINUS_EXPR : PLUS_EXPR),
9198 TREE_OPERAND (exp, 1));
9200 while (TREE_CODE (incremented) == NOP_EXPR
9201 || TREE_CODE (incremented) == CONVERT_EXPR)
9203 newexp = convert (TREE_TYPE (incremented), newexp);
9204 incremented = TREE_OPERAND (incremented, 0);
9207 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9208 return post ? op0 : temp;
9213 /* We have a true reference to the value in OP0.
9214 If there is an insn to add or subtract in this mode, queue it.
9215 Queuing the increment insn avoids the register shuffling
9216 that often results if we must increment now and first save
9217 the old value for subsequent use. */
9219 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9220 op0 = stabilize (op0);
9223 icode = (int) this_optab->handlers[(int) mode].insn_code;
9224 if (icode != (int) CODE_FOR_nothing
9225 /* Make sure that OP0 is valid for operands 0 and 1
9226 of the insn we want to queue. */
9227 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9228 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9230 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9231 op1 = force_reg (mode, op1);
9233 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9235 if (icode != (int) CODE_FOR_nothing && MEM_P (op0))
9237 rtx addr = (general_operand (XEXP (op0, 0), mode)
9238 ? force_reg (Pmode, XEXP (op0, 0))
9239 : copy_to_reg (XEXP (op0, 0)));
9242 op0 = replace_equiv_address (op0, addr);
9243 temp = force_reg (GET_MODE (op0), op0);
9244 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9245 op1 = force_reg (mode, op1);
9247 /* The increment queue is LIFO, thus we have to `queue'
9248 the instructions in reverse order. */
9249 enqueue_insn (op0, gen_move_insn (op0, temp));
9250 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9255 /* Preincrement, or we can't increment with one simple insn. */
9257 /* Save a copy of the value before inc or dec, to return it later. */
9258 temp = value = copy_to_reg (op0);
9260 /* Arrange to return the incremented value. */
9261 /* Copy the rtx because expand_binop will protect from the queue,
9262 and the results of that would be invalid for us to return
9263 if our caller does emit_queue before using our result. */
9264 temp = copy_rtx (value = op0);
9266 /* Increment however we can. */
9267 op1 = expand_binop (mode, this_optab, value, op1, op0,
9268 TYPE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9270 /* Make sure the value is stored into OP0. */
9272 emit_move_insn (op0, op1);
9277 /* Generate code to calculate EXP using a store-flag instruction
9278 and return an rtx for the result. EXP is either a comparison
9279 or a TRUTH_NOT_EXPR whose operand is a comparison.
9281 If TARGET is nonzero, store the result there if convenient.
9283 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9286 Return zero if there is no suitable set-flag instruction
9287 available on this machine.
9289 Once expand_expr has been called on the arguments of the comparison,
9290 we are committed to doing the store flag, since it is not safe to
9291 re-evaluate the expression. We emit the store-flag insn by calling
9292 emit_store_flag, but only expand the arguments if we have a reason
9293 to believe that emit_store_flag will be successful. If we think that
9294 it will, but it isn't, we have to simulate the store-flag with a
9295 set/jump/set sequence. */
9298 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9301 tree arg0, arg1, type;
9303 enum machine_mode operand_mode;
9307 enum insn_code icode;
9308 rtx subtarget = target;
9311 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9312 result at the end. We can't simply invert the test since it would
9313 have already been inverted if it were valid. This case occurs for
9314 some floating-point comparisons. */
9316 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9317 invert = 1, exp = TREE_OPERAND (exp, 0);
9319 arg0 = TREE_OPERAND (exp, 0);
9320 arg1 = TREE_OPERAND (exp, 1);
9322 /* Don't crash if the comparison was erroneous. */
9323 if (arg0 == error_mark_node || arg1 == error_mark_node)
9326 type = TREE_TYPE (arg0);
9327 operand_mode = TYPE_MODE (type);
9328 unsignedp = TYPE_UNSIGNED (type);
9330 /* We won't bother with BLKmode store-flag operations because it would mean
9331 passing a lot of information to emit_store_flag. */
9332 if (operand_mode == BLKmode)
9335 /* We won't bother with store-flag operations involving function pointers
9336 when function pointers must be canonicalized before comparisons. */
9337 #ifdef HAVE_canonicalize_funcptr_for_compare
9338 if (HAVE_canonicalize_funcptr_for_compare
9339 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9340 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9342 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9343 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9344 == FUNCTION_TYPE))))
9351 /* Get the rtx comparison code to use. We know that EXP is a comparison
9352 operation of some type. Some comparisons against 1 and -1 can be
9353 converted to comparisons with zero. Do so here so that the tests
9354 below will be aware that we have a comparison with zero. These
9355 tests will not catch constants in the first operand, but constants
9356 are rarely passed as the first operand. */
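/* For example (signed int x, illustrative): "x < 1" is rewritten as
   "x <= 0" and "x > -1" as "x >= 0", so the tests below only need to
   recognize comparisons against zero.  */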
9358 switch (TREE_CODE (exp))
9367 if (integer_onep (arg1))
9368 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9370 code = unsignedp ? LTU : LT;
9373 if (! unsignedp && integer_all_onesp (arg1))
9374 arg1 = integer_zero_node, code = LT;
9376 code = unsignedp ? LEU : LE;
9379 if (! unsignedp && integer_all_onesp (arg1))
9380 arg1 = integer_zero_node, code = GE;
9382 code = unsignedp ? GTU : GT;
9385 if (integer_onep (arg1))
9386 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9388 code = unsignedp ? GEU : GE;
9391 case UNORDERED_EXPR:
9420 /* Put a constant second. */
9421 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9423 tem = arg0; arg0 = arg1; arg1 = tem;
9424 code = swap_condition (code);
9427 /* If this is an equality or inequality test of a single bit, we can
9428 do this by shifting the bit being tested to the low-order bit and
9429 masking the result with the constant 1. If the condition was EQ,
9430 we xor it with 1. This does not require an scc insn and is faster
9431 than an scc insn even if we have it.
9433 The code to make this transformation was moved into fold_single_bit_test,
9434 so we just call into the folder and expand its result. */
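/* E.g. (illustrative): "(x & 4) != 0" expands as "(x >> 2) & 1", and
   "(x & 4) == 0" as the same value xor'd with 1 -- no scc insn or
   branch is required.  */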
9436 if ((code == NE || code == EQ)
9437 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9438 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9440 tree type = lang_hooks.types.type_for_mode (mode, unsignedp);
9441 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9443 target, VOIDmode, EXPAND_NORMAL);
9446 /* Now see if we are likely to be able to do this. Return if not. */
9447 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9450 icode = setcc_gen_code[(int) code];
9451 if (icode == CODE_FOR_nothing
9452 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9454 /* We can only do this if it is one of the special cases that
9455 can be handled without an scc insn. */
9456 if ((code == LT && integer_zerop (arg1))
9457 || (! only_cheap && code == GE && integer_zerop (arg1)))
9459 else if (BRANCH_COST >= 0
9460 && ! only_cheap && (code == NE || code == EQ)
9461 && TREE_CODE (type) != REAL_TYPE
9462 && ((abs_optab->handlers[(int) operand_mode].insn_code
9463 != CODE_FOR_nothing)
9464 || (ffs_optab->handlers[(int) operand_mode].insn_code
9465 != CODE_FOR_nothing)))
9471 if (! get_subtarget (target)
9472 || GET_MODE (subtarget) != operand_mode)
9473 subtarget = 0;
9475 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9478 target = gen_reg_rtx (mode);
9480 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9481 because, if the emit_store_flag does anything it will succeed and
9482 OP0 and OP1 will not be used subsequently. */
9484 result = emit_store_flag (target, code,
9485 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9486 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9487 operand_mode, unsignedp, 1);
9492 result = expand_binop (mode, xor_optab, result, const1_rtx,
9493 result, 0, OPTAB_LIB_WIDEN);
9497 /* If this failed, we have to do this with set/compare/jump/set code. */
9499 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9500 target = gen_reg_rtx (GET_MODE (target));
9502 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9503 result = compare_from_rtx (op0, op1, code, unsignedp,
9504 operand_mode, NULL_RTX);
9505 if (GET_CODE (result) == CONST_INT)
9506 return (((result == const0_rtx && ! invert)
9507 || (result != const0_rtx && invert))
9508 ? const0_rtx : const1_rtx);
9510 /* The code of RESULT may not match CODE if compare_from_rtx
9511 decided to swap its operands and reverse the original code.
9513 We know that compare_from_rtx returns either a CONST_INT or
9514 a new comparison code, so it is safe to just extract the
9515 code from RESULT. */
9516 code = GET_CODE (result);
9518 label = gen_label_rtx ();
9519 if (bcc_gen_fctn[(int) code] == 0)
9522 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9523 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
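/* The fallback being emitted here corresponds to (illustrative, for
   the non-inverted case):

      r = 1; if (op0 <code> op1) goto done; r = 0; done:

   with the two constants swapped when INVERT is set -- the
   set/compare/jump/set sequence promised above.  */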
9530 /* Stubs in case we haven't got a casesi insn. */
9532 # define HAVE_casesi 0
9533 # define gen_casesi(a, b, c, d, e) (0)
9534 # define CODE_FOR_casesi CODE_FOR_nothing
9537 /* If the machine does not have a case insn that compares the bounds,
9538 this means extra overhead for dispatch tables, which raises the
9539 threshold for using them. */
9540 #ifndef CASE_VALUES_THRESHOLD
9541 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9542 #endif /* CASE_VALUES_THRESHOLD */
9545 case_values_threshold (void)
9547 return CASE_VALUES_THRESHOLD;
9550 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9551 0 otherwise (i.e. if there is no casesi instruction). */
9553 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9554 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9556 enum machine_mode index_mode = SImode;
9557 int index_bits = GET_MODE_BITSIZE (index_mode);
9558 rtx op1, op2, index;
9559 enum machine_mode op_mode;
9564 /* Convert the index to SImode. */
9565 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9567 enum machine_mode omode = TYPE_MODE (index_type);
9568 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9570 /* We must handle the endpoints in the original mode. */
9571 index_expr = build (MINUS_EXPR, index_type,
9572 index_expr, minval);
9573 minval = integer_zero_node;
9574 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9575 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9576 omode, 1, default_label);
9577 /* Now we can safely truncate. */
9578 index = convert_to_mode (index_mode, index, 0);
9582 if (TYPE_MODE (index_type) != index_mode)
9584 index_expr = convert (lang_hooks.types.type_for_size
9585 (index_bits, 0), index_expr);
9586 index_type = TREE_TYPE (index_expr);
9589 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9592 index = protect_from_queue (index, 0);
9593 do_pending_stack_adjust ();
9595 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9596 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9598 index = copy_to_mode_reg (op_mode, index);
9600 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9602 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9603 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9604 op1, TYPE_UNSIGNED (TREE_TYPE (minval)));
9605 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9607 op1 = copy_to_mode_reg (op_mode, op1);
9609 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9611 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9612 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9613 op2, TYPE_UNSIGNED (TREE_TYPE (range)));
9614 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9616 op2 = copy_to_mode_reg (op_mode, op2);
9618 emit_jump_insn (gen_casesi (index, op1, op2,
9619 table_label, default_label));
9623 /* Attempt to generate a tablejump instruction; same concept. */
9624 #ifndef HAVE_tablejump
9625 #define HAVE_tablejump 0
9626 #define gen_tablejump(x, y) (0)
9629 /* Subroutine of the next function.
9631 INDEX is the value being switched on, with the lowest value
9632 in the table already subtracted.
9633 MODE is its expected mode (needed if INDEX is constant).
9634 RANGE is the length of the jump table.
9635 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9637 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9638 index value is out of range. */
9641 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9646 if (INTVAL (range) > cfun->max_jumptable_ents)
9647 cfun->max_jumptable_ents = INTVAL (range);
9649 /* Do an unsigned comparison (in the proper mode) between the index
9650 expression and the value which represents the length of the range.
9651 Since we just finished subtracting the lower bound of the range
9652 from the index expression, this comparison allows us to simultaneously
9653 check that the original index expression value is both greater than
9654 or equal to the minimum value of the range and less than or equal to
9655 the maximum value of the range. */
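/* For example (illustrative bounds): for case values 3..10 the
   caller has already subtracted 3, so the single unsigned test

      (unsigned) (i - 3) > (unsigned) (10 - 3)

   rejects exactly i < 3 and i > 10; a negative I wraps around to a
   huge unsigned value and is rejected as well.  */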
9657 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9660 /* If index is in range, it must fit in Pmode.
9661 Convert to Pmode so we can index with it. */
9663 index = convert_to_mode (Pmode, index, 1);
9665 /* Don't let a MEM slip through, because then INDEX that comes
9666 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9667 and break_out_memory_refs will go to work on it and mess it up. */
9668 #ifdef PIC_CASE_VECTOR_ADDRESS
9669 if (flag_pic && !REG_P (index))
9670 index = copy_to_mode_reg (Pmode, index);
9673 /* If flag_force_addr were to affect this address
9674 it could interfere with the tricky assumptions made
9675 about addresses that contain label-refs,
9676 which may be valid only very near the tablejump itself. */
9677 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9678 GET_MODE_SIZE, because this indicates how large insns are. The other
9679 uses should all be Pmode, because they are addresses. This code
9680 could fail if addresses and insns are not the same size. */
9681 index = gen_rtx_PLUS (Pmode,
9682 gen_rtx_MULT (Pmode, index,
9683 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9684 gen_rtx_LABEL_REF (Pmode, table_label));
9685 #ifdef PIC_CASE_VECTOR_ADDRESS
9687 index = PIC_CASE_VECTOR_ADDRESS (index);
9690 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9691 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9692 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9693 RTX_UNCHANGING_P (vector) = 1;
9694 MEM_NOTRAP_P (vector) = 1;
9695 convert_move (temp, vector, 0);
9697 emit_jump_insn (gen_tablejump (temp, table_label));
9699 /* If we are generating PIC code or if the table is PC-relative, the
9700 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9701 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9706 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9707 rtx table_label, rtx default_label)
9711 if (! HAVE_tablejump)
9714 index_expr = fold (build (MINUS_EXPR, index_type,
9715 convert (index_type, index_expr),
9716 convert (index_type, minval)));
9717 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9719 index = protect_from_queue (index, 0);
9720 do_pending_stack_adjust ();
9722 do_tablejump (index, TYPE_MODE (index_type),
9723 convert_modes (TYPE_MODE (index_type),
9724 TYPE_MODE (TREE_TYPE (range)),
9725 expand_expr (range, NULL_RTX,
9727 TYPE_UNSIGNED (TREE_TYPE (range))),
9728 table_label, default_label);
9732 /* Nonzero if the mode is a valid vector mode for this architecture.
9733 This returns nonzero even if there is no hardware support for the
9734 vector mode, but we can emulate with narrower modes. */
9737 vector_mode_valid_p (enum machine_mode mode)
9739 enum mode_class class = GET_MODE_CLASS (mode);
9740 enum machine_mode innermode;
9742 /* Doh! What's going on? */
9743 if (class != MODE_VECTOR_INT
9744 && class != MODE_VECTOR_FLOAT)
9747 /* Hardware support. Woo hoo! */
9748 if (VECTOR_MODE_SUPPORTED_P (mode))
9751 innermode = GET_MODE_INNER (mode);
9753 /* We should probably return 1 if requesting V4DI and we have no DI,
9754 but we have V2DI, but this is probably very unlikely. */
9756 /* If we have support for the inner mode, we can safely emulate it.
9757 We may not have V2DI, but we can emulate with a pair of DIs. */
9758 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
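/* E.g. (illustrative): a target with DImode moves but no V2DI moves
   still reports V2DI as valid here, since a V2DI value can be moved
   as two DImode halves.  */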
9761 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9763 const_vector_from_tree (tree exp)
9768 enum machine_mode inner, mode;
9770 mode = TYPE_MODE (TREE_TYPE (exp));
9772 if (initializer_zerop (exp))
9773 return CONST0_RTX (mode);
9775 units = GET_MODE_NUNITS (mode);
9776 inner = GET_MODE_INNER (mode);
9778 v = rtvec_alloc (units);
9780 link = TREE_VECTOR_CST_ELTS (exp);
9781 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9783 elt = TREE_VALUE (link);
9785 if (TREE_CODE (elt) == REAL_CST)
9786 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9789 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9790 TREE_INT_CST_HIGH (elt),
9794 /* Initialize remaining elements to 0. */
9795 for (; i < units; ++i)
9796 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9798 return gen_rtx_raw_CONST_VECTOR (mode, v);
9800 #include "gt-expr.h"