/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
  void *constfundata;
  int reverse;
};
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
						     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
			      struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
			       struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
				     HOST_WIDE_INT, enum machine_mode,
				     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
			tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);

static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);

static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
			     enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
#ifdef PUSH_ROUNDING
static void emit_single_push_insn (enum machine_mode, rtx, tree);
#endif
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];
/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif
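
/* Illustrative sketch (not part of the compiler proper): how these
   predicates gate the by-pieces strategies.  For a hypothetical target
   with a MOVE_RATIO of 3 and word-sized moves, an 8-byte word-aligned
   copy needs 2 insns, so MOVE_BY_PIECES_P is true and the copy is
   expanded inline instead of going through a movstr pattern or a
   libcall.  Guarded by #if 0; never compiled.  */
#if 0
static void
example_by_pieces_decision (rtx dst, rtx src)
{
  unsigned HOST_WIDE_INT size = 8;
  unsigned int align = BITS_PER_WORD;

  if (MOVE_BY_PIECES_P (size, align))
    /* Inline expansion: a short run of scalar moves.  */
    move_by_pieces (dst, src, size, align, 0);
  else
    /* Otherwise fall back to a movstr pattern or a memcpy call.  */
    emit_block_move (dst, src, GEN_INT (size), BLOCK_OP_NORMAL);
}
#endif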
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;

      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
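
/* Illustrative sketch (example only, never compiled): the expected
   discipline when emitting an rtx that may be QUEUED.  Protect the
   operand immediately before use, and do not cache the protected
   value across a call to emit_queue.  */
#if 0
static void
example_queue_discipline (rtx var)
{
  /* Queue "var = var + 1" to be emitted later, as a post-increment
     expression would require.  */
  rtx q = enqueue_insn (var, gen_move_insn (var, plus_constant (var, 1)));

  /* Any use of Q in an insn must go through protect_from_queue first;
     before the queue is flushed this yields the pre-increment value.  */
  rtx safe = protect_from_queue (q, 0);
  emit_move_insn (gen_reg_rtx (GET_MODE (var)), safe);

  /* Flush all pending increments.  */
  emit_queue ();
}
#endif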
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  /* QUEUED_BODY should never be a SEQUENCE.  */
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
	tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
	tab = trunc_optab;
      else
	abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from,
			  tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
	  return;
	}

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
			  tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
								       from)
			  : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }
  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      if (full_mode != from_mode)
	from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
	= smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
	  == CODE_FOR_nothing)
	abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      if (to_mode == full_mode)
	return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }
  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
		      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
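
/* Illustrative sketch (example only, never compiled): widening a
   QImode value into an SImode pseudo.  UNSIGNEDP selects zero- versus
   sign-extension.  */
#if 0
static rtx
example_widen_qi_to_si (rtx qi_val, int unsignedp)
{
  rtx si_reg = gen_reg_rtx (SImode);

  /* convert_move picks a direct extend insn, an intermediate mode, or
     a shift sequence, whichever the target supports.  */
  convert_move (si_reg, qi_val, unsignedp);
  return si_reg;
}
#endif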
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  /* Converting from integer constant into mode is always equivalent to a
     subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
	abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
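
/* Illustrative sketch (example only, never compiled): convert_modes
   may avoid emitting any code at all; narrowing a constant simply
   folds it.  */
#if 0
static void
example_convert_modes (void)
{
  /* Folds to a QImode-valid constant (255 wraps to -1); no insns
     are emitted.  */
  rtx narrow = convert_modes (QImode, SImode, GEN_INT (255), 0);

  /* A register source instead yields gen_lowpart or a new pseudo
     filled by convert_move.  */
  rtx wide = convert_modes (DImode, SImode, gen_reg_rtx (SImode), 1);

  (void) narrow;
  (void) wide;
}
#endif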
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
		    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory at the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
		unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
		  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
	return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
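
/* Illustrative sketch (example only, never compiled): a typical caller
   copies one BLKmode MEM to another and lets emit_block_move pick the
   strategy (by-pieces, movstr pattern, libcall, or explicit loop).  */
#if 0
static void
example_block_copy (rtx dst_blk, rtx src_blk)
{
  /* A 32-byte copy; a constant size enables the by-pieces fast path.  */
  rtx size = GEN_INT (32);

  /* BLOCK_OP_NORMAL permits the memcpy libcall fallback.  */
  emit_block_move (dst_blk, src_blk, size, BLOCK_OP_NORMAL);
}
#endif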
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
	enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	if (!tmp || !REG_P (tmp))
	  return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					NULL_TREE, 1))
	  return false;
#endif
	FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
					     gen_rtx_CLOBBER (VOIDmode, dst),
					     NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
			  unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  if (nregs == 0)
    return;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				    GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* Arrange to shift the fragment to where it belongs.
	     extract_bit_field loads to the lsb of the reg.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* If we won't be loading directly from memory, protect the real source
	 from strange tricks we might play; but make sure that the source can
	 be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
	  && (!CONSTANT_P (orig_src)
	      || (GET_MODE (orig_src) != mode
		  && GET_MODE (orig_src) != VOIDmode)))
	{
	  if (GET_MODE (orig_src) == VOIDmode)
	    src = gen_reg_rtx (mode);
	  else
	    src = gen_reg_rtx (GET_MODE (orig_src));

	  emit_move_insn (src, orig_src);
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
	      || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
	  unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

	  if ((bytepos == 0 && bytelen == slen0)
	      || (bytepos != 0 && bytepos + bytelen <= slen))
	    {
	      /* The following assumes that the concatenated objects all
		 have the same size.  In this case, a simple calculation
		 can be used to determine the object and the bit field
		 to be extracted.  */
	      tmps[i] = XEXP (src, bytepos / slen0);
	      if (! CONSTANT_P (tmps[i])
		  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
		tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
					     (bytepos % slen0) * BITS_PER_UNIT,
					     1, NULL_RTX, mode, mode, ssize);
	    }
	  else if (bytepos == 0)
	    {
	      rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
	      emit_move_insn (mem, src);
	      tmps[i] = adjust_address (mem, mode, 0);
	    }
	  else
	    abort ();
	}
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
	 SIMD register, which is currently broken.  While we get GCC
	 to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
	       && GET_CODE (src) == REG)
	{
	  int slen = GET_MODE_SIZE (GET_MODE (src));
	  rtx mem;

	  mem = assign_stack_temp (GET_MODE (src), slen, 0);
	  emit_move_insn (mem, src);
	  tmps[i] = adjust_address (mem, mode, (int) bytepos);
	}
      else if (CONSTANT_P (src) && GET_MODE (dst) != BLKmode
	       && XVECLEN (dst, 0) > 1)
	tmps[i] = simplify_gen_subreg (mode, src, GET_MODE (dst), bytepos);
      else if (CONSTANT_P (src)
	       || (GET_CODE (src) == REG && GET_MODE (src) == mode))
	tmps[i] = src;
      else
	tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				     mode, mode, ssize);

      if (shift)
	expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
		      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
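
/* Illustrative sketch (example only, never compiled): a two-register
   PARALLEL group of the kind emit_group_load consumes, describing a
   16-byte value split across two hypothetical DImode hard registers
   numbered 0 and 1.  */
#if 0
static rtx
example_group (void)
{
  rtx r0 = gen_rtx_REG (DImode, 0);	/* bytes 0..7  */
  rtx r1 = gen_rtx_REG (DImode, 1);	/* bytes 8..15 */

  return gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, r0, GEN_INT (0)),
		gen_rtx_EXPR_LIST (VOIDmode, r1, GEN_INT (8))));
}
/* emit_group_load (example_group (), some_mem, type, 16) would then
   load each piece from its byte offset in SOME_MEM.  */
#endif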
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
		    XEXP (XVECEXP (src, 0, i), 0));
}
/* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
   where SRC is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_DST, or -1 if not
   known.  */

void
emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, type, ssize);
      emit_group_load (dst, temp, type, ssize);
      return;
    }
  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      unsigned int bytelen = GET_MODE_SIZE (mode);
      rtx dest = dst;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
	{
	  /* store_bit_field always takes its value from the lsb.
	     Move the fragment to the lsb if it's not already there.  */
	  if (
#ifdef BLOCK_REG_PADDING
	      BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
	      == (BYTES_BIG_ENDIAN ? upward : downward)
#else
	      BYTES_BIG_ENDIAN
#endif
	      )
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      if (GET_CODE (dst) == CONCAT)
	{
	  if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    dest = XEXP (dst, 0);
	  else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
	    {
	      bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
	      dest = XEXP (dst, 1);
	    }
	  else if (bytepos == 0 && XVECLEN (src, 0))
	    {
	      dest = assign_stack_temp (GET_MODE (dest),
					GET_MODE_SIZE (GET_MODE (dest)), 0);
	      emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
			      tmps[i]);
	      dst = dest;
	      break;
	    }
	  else
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dest) == MEM
	  && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
	      || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
      else
	store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (orig_dst != dst)
    emit_move_insn (orig_dst, dst);
}
2095 /* Generate code to copy a BLKmode object of TYPE out of a
2096 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2097 is null, a stack temporary is created. TGTBLK is returned.
2099 The purpose of this routine is to handle functions that return
2100 BLKmode structures in registers. Some machines (the PA for example)
2101 want to return all small structures in registers regardless of the
2102 structure's alignment. */
2105 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2107 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2108 rtx src = NULL, dst = NULL;
2109 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2110 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
  if (tgtblk == 0)
    {
      tgtblk = assign_temp (build_qualified_type (type,
						  (TYPE_QUALS (type)
						   | TYPE_QUAL_CONST)),
			    0, 1, 1);
      preserve_temp_slots (tgtblk);
    }
2121 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2122 into a new pseudo which is a full word. */
2124 if (GET_MODE (srcreg) != BLKmode
2125 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2126 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2128 /* If the structure doesn't take up a whole number of words, see whether
2129 SRCREG is padded on the left or on the right. If it's on the left,
2130 set PADDING_CORRECTION to the number of bits to skip.
     In most ABIs, the structure will be returned at the least significant
     end of the register, which translates to right padding on little-endian
2134 targets and left padding on big-endian targets. The opposite
2135 holds if the structure is returned at the most significant
2136 end of the register. */
2137 if (bytes % UNITS_PER_WORD != 0
2138 && (targetm.calls.return_in_msb (type)
	  ? !BYTES_BIG_ENDIAN
	  : BYTES_BIG_ENDIAN))
    padding_correction
      = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
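  /* For example, on a 32-bit big-endian target a 3-byte structure
     returned in the least significant end of its register is left
     padded, and padding_correction = 32 - 3 * BITS_PER_UNIT = 8.  */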
  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines which do not use
     strict alignment, but it doesn't seem worth the effort at the current
     time.  */
2149 for (bitpos = 0, xbitpos = padding_correction;
2150 bitpos < bytes * BITS_PER_UNIT;
2151 bitpos += bitsize, xbitpos += bitsize)
2153 /* We need a new source operand each time xbitpos is on a
2154 word boundary and when xbitpos == padding_correction
2155 (the first time through). */
2156 if (xbitpos % BITS_PER_WORD == 0
2157 || xbitpos == padding_correction)
2158 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2161 /* We need a new destination operand each time bitpos is on
2163 if (bitpos % BITS_PER_WORD == 0)
2164 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
2168 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2169 extract_bit_field (src, bitsize,
2170 xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode, word_mode,
					  BITS_PER_WORD),
		       BITS_PER_WORD);

  return tgtblk;
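/* To illustrate the copy loop above: for the 3-byte big-endian case
   (padding_correction == 8) with TYPE_ALIGN == 8, bitsize is 8 and the
   three iterations move source bits 8..15, 16..23 and 24..31 into
   target bits 0..7, 8..15 and 16..23, stripping the left padding.  */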
2179 /* Add a USE expression for REG to the (possibly empty) list pointed
2180 to by CALL_FUSAGE. REG must denote a hard register. */
2183 use_reg (rtx *call_fusage, rtx reg)
2185 if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
2190 = gen_rtx_EXPR_LIST (VOIDmode,
2191 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2194 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2195 starting at REGNO. All of these registers must be hard registers. */
2198 use_regs (rtx *call_fusage, int regno, int nregs)
  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();
2205 for (i = 0; i < nregs; i++)
2206 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2209 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2210 PARALLEL REGS. This is for calls that pass values in multiple
2211 non-contiguous locations. The Irix 6 ABI has examples of this. */
2214 use_group_regs (rtx *call_fusage, rtx regs)
2218 for (i = 0; i < XVECLEN (regs, 0); i++)
2220 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2222 /* A NULL entry means the parameter goes both on the stack and in
2223 registers. This can also be a MEM for targets that pass values
2224 partially on the stack and partially in registers. */
2225 if (reg != 0 && GET_CODE (reg) == REG)
2226 use_reg (call_fusage, reg);
2231 /* Determine whether the LEN bytes generated by CONSTFUN can be
2232 stored to memory using several move instructions. CONSTFUNDATA is
2233 a pointer which will be passed as argument in every CONSTFUN call.
2234 ALIGN is maximum alignment we can assume. Return nonzero if a
2235 call to store_by_pieces should succeed. */
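/* For illustration only (this helper is assumed, not defined in this
   file): a minimal CONSTFUN, given that CONSTFUNDATA points at a host
   buffer with at least LEN addressable bytes, could be

     static rtx
     readstr_constfun (void *data, HOST_WIDE_INT offset,
		       enum machine_mode mode)
     {
       return c_readstr ((const char *) data + offset, mode);
     }

   where c_readstr assembles host bytes into a CONST_INT or CONST_DOUBLE
   of the requested mode, which is what the LEGITIMATE_CONSTANT_P check
   below examines.  */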
2238 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2239 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2240 void *constfundata, unsigned int align)
2242 unsigned HOST_WIDE_INT max_size, l;
2243 HOST_WIDE_INT offset = 0;
2244 enum machine_mode mode, tmode;
2245 enum insn_code icode;
  if (! STORE_BY_PIECES_P (len, align))
    return 0;
2255 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2256 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2257 align = MOVE_MAX * BITS_PER_UNIT;
2259 /* We would first store what we can in the largest integer mode, then go to
2260 successively smaller modes. */
2263 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2268 max_size = STORE_MAX_PIECES + 1;
2269 while (max_size > 1)
2271 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2272 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	    if (GET_MODE_SIZE (tmode) < max_size)
	      mode = tmode;

	  if (mode == VOIDmode)
	    break;
2279 icode = mov_optab->handlers[(int) mode].insn_code;
2280 if (icode != CODE_FOR_nothing
2281 && align >= GET_MODE_ALIGNMENT (mode))
2283 unsigned int size = GET_MODE_SIZE (mode);
2290 cst = (*constfun) (constfundata, offset, mode);
2291 if (!LEGITIMATE_CONSTANT_P (cst))
2301 max_size = GET_MODE_SIZE (mode);
  /* The code above should have handled everything.  */
  if (l != 0)
    abort ();

  return 1;
2312 /* Generate several move instructions to store LEN bytes generated by
2313 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2314 pointer which will be passed as argument in every CONSTFUN call.
2315 ALIGN is maximum alignment we can assume.
2316 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */
2321 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2322 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2323 void *constfundata, unsigned int align, int endp)
2325 struct store_by_pieces data;
  if (! STORE_BY_PIECES_P (len, align))
    abort ();
2336 to = protect_from_queue (to, 1);
2337 data.constfun = constfun;
2338 data.constfundata = constfundata;
2341 store_by_pieces_1 (&data, align);
2352 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2353 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
2358 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2365 to1 = adjust_address (data.to, QImode, data.offset);
2373 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2374 rtx with BLKmode). The caller must pass TO through protect_from_queue
2375 before calling. ALIGN is maximum alignment we can assume. */
2378 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2380 struct store_by_pieces data;
2385 data.constfun = clear_by_pieces_1;
  data.constfundata = NULL;
  data.len = len;
  data.to = to;
2389 store_by_pieces_1 (&data, align);
2392 /* Callback routine for clear_by_pieces.
2393 Return const0_rtx unconditionally. */
2396 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2397 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
		   enum machine_mode mode ATTRIBUTE_UNUSED)
{
  return const0_rtx;
}
2403 /* Subroutine of clear_by_pieces and store_by_pieces.
2404 Generate several move instructions to store LEN bytes of block TO. (A MEM
2405 rtx with BLKmode). The caller must pass TO through protect_from_queue
2406 before calling. ALIGN is maximum alignment we can assume. */
2409 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2410 unsigned int align ATTRIBUTE_UNUSED)
2412 rtx to_addr = XEXP (data->to, 0);
2413 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2414 enum machine_mode mode = VOIDmode, tmode;
2415 enum insn_code icode;
2418 data->to_addr = to_addr;
  data->autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2421 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2423 data->explicit_inc_to = 0;
  data->reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data->reverse)
    data->offset = data->len;
2429 /* If storing requires more than two move insns,
2430 copy addresses to registers (to make displacements shorter)
2431 and use post-increment if available. */
2432 if (!data->autinc_to
2433 && move_by_pieces_ninsns (data->len, align) > 2)
2435 /* Determine the main mode we'll be using. */
2436 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2437 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;
2441 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2443 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2444 data->autinc_to = 1;
2445 data->explicit_inc_to = -1;
2448 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2449 && ! data->autinc_to)
2451 data->to_addr = copy_addr_to_reg (to_addr);
2452 data->autinc_to = 1;
2453 data->explicit_inc_to = 1;
2456 if ( !data->autinc_to && CONSTANT_P (to_addr))
2457 data->to_addr = copy_addr_to_reg (to_addr);
2460 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2461 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2462 align = MOVE_MAX * BITS_PER_UNIT;
2464 /* First store what we can in the largest integer mode, then go to
2465 successively smaller modes. */
2467 while (max_size > 1)
2469 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2470 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;
2477 icode = mov_optab->handlers[(int) mode].insn_code;
2478 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2479 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2481 max_size = GET_MODE_SIZE (mode);
  /* The code above should have handled everything.  */
  if (data->len != 0)
    abort ();
2489 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2490 with move instructions for mode MODE. GENFUN is the gen_... function
2491 to make a move insn for that mode. DATA has all the other info. */
2494 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2495 struct store_by_pieces *data)
2497 unsigned int size = GET_MODE_SIZE (mode);
2500 while (data->len >= size)
      if (data->reverse)
	data->offset -= size;
2505 if (data->autinc_to)
	to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					 data->offset);
      else
	to1 = adjust_address (data->to, mode, data->offset);
2511 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2512 emit_insn (gen_add2_insn (data->to_addr,
2513 GEN_INT (-(HOST_WIDE_INT) size)));
2515 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2516 emit_insn ((*genfun) (to1, cst));
2518 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2519 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2521 if (! data->reverse)
	data->offset += size;

      data->len -= size;
2528 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2529 its length in bytes. */
2532 clear_storage (rtx object, rtx size)
  rtx retval = 0;
  unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2536 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2538 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2539 just move a zero. Otherwise, do this a piece at a time. */
2540 if (GET_MODE (object) != BLKmode
2541 && GET_CODE (size) == CONST_INT
2542 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2543 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2546 object = protect_from_queue (object, 1);
2547 size = protect_from_queue (size, 0);
      if (size == const0_rtx)
	;
2551 else if (GET_CODE (size) == CONST_INT
2552 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2553 clear_by_pieces (object, INTVAL (size), align);
2554 else if (clear_storage_via_clrstr (object, size, align))
	;
      else
	retval = clear_storage_via_libcall (object, size);

  return retval;
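/* Illustrative use (assumed, not taken verbatim from any caller):

     rtx mem = assign_stack_temp (BLKmode, 32, 0);
     clear_storage (mem, GEN_INT (32));

   would zero a 32-byte stack slot via clear_by_pieces, a clrstr
   pattern, or the library call, tried in that order.  */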
2563 /* A subroutine of clear_storage. Expand a clrstr pattern;
2564 return true if successful. */
2567 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2569 /* Try the most limited insn first, because there's no point
2570 including more than one in the machine description unless
2571 the more limited one has some advantage. */
2573 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2574 enum machine_mode mode;
2576 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2577 mode = GET_MODE_WIDER_MODE (mode))
2579 enum insn_code code = clrstr_optab[(int) mode];
2580 insn_operand_predicate_fn pred;
2582 if (code != CODE_FOR_nothing
2583 /* We don't need MODE to be narrower than
2584 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2585 the mode mask, as it is returned by the macro, it will
2586 definitely be less than the actual mode mask. */
2587 && ((GET_CODE (size) == CONST_INT
2588 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2589 <= (GET_MODE_MASK (mode) >> 1)))
2590 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2591 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2592 || (*pred) (object, BLKmode))
2593 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2594 || (*pred) (opalign, VOIDmode)))
2597 rtx last = get_last_insn ();
2600 op1 = convert_to_mode (mode, size, 1);
2601 pred = insn_data[(int) code].operand[1].predicate;
2602 if (pred != 0 && ! (*pred) (op1, mode))
2603 op1 = copy_to_mode_reg (mode, op1);
2605 pat = GEN_FCN ((int) code) (object, op1, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      return true;
	    }
	  else
	    delete_insns_since (last);
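/* Concretely, for the size check above: a HImode count register
   (GET_MODE_MASK == 0xffff) is only trusted for constant sizes up to
   0x7fff, while any count mode at least BITS_PER_WORD wide is assumed
   to handle every size; hence the narrowest workable pattern wins.  */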
2619 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2620 Return the return value of memset, 0 otherwise. */
2623 clear_storage_via_libcall (rtx object, rtx size)
2625 tree call_expr, arg_list, fn, object_tree, size_tree;
2626 enum machine_mode size_mode;
2629 /* OBJECT or SIZE may have been passed through protect_from_queue.
2631 It is unsafe to save the value generated by protect_from_queue
2632 and reuse it later. Consider what happens if emit_queue is
2633 called before the return value from protect_from_queue is used.
2635 Expansion of the CALL_EXPR below will call emit_queue before
2636 we are finished emitting RTL for argument setup. So if we are
2637 not careful we could get the wrong value for an argument.
2639 To avoid this problem we go ahead and emit code to copy OBJECT
2640 and SIZE into new pseudos. We can then place those new pseudos
2641 into an RTL_EXPR and use them later, even after a call to
2644 Note this is not strictly needed for library calls since they
2645 do not call emit_queue before loading their arguments. However,
2646 we may need to have library calls call emit_queue in the future
2647 since failing to do so could cause problems for targets which
2648 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2650 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2652 if (TARGET_MEM_FUNCTIONS)
2653 size_mode = TYPE_MODE (sizetype);
2655 size_mode = TYPE_MODE (unsigned_type_node);
2656 size = convert_to_mode (size_mode, size, 1);
2657 size = copy_to_mode_reg (size_mode, size);
2659 /* It is incorrect to use the libcall calling conventions to call
2660 memset in this context. This could be a user call to memset and
2661 the user may wish to examine the return value from memset. For
2662 targets where libcalls and normal calls have different conventions
2663 for returning pointers, we could end up generating incorrect code.
2665 For convenience, we generate the call to bzero this way as well. */
2667 object_tree = make_tree (ptr_type_node, object);
2668 if (TARGET_MEM_FUNCTIONS)
2669 size_tree = make_tree (sizetype, size);
2671 size_tree = make_tree (unsigned_type_node, size);
2673 fn = clear_storage_libcall_fn (true);
2674 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2675 if (TARGET_MEM_FUNCTIONS)
2676 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2677 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2679 /* Now we have to build up the CALL_EXPR itself. */
2680 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2681 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2682 call_expr, arg_list, NULL_TREE);
2684 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2686 /* If we are initializing a readonly value, show the above call
2687 clobbered it. Otherwise, a load from it may erroneously be
2688 hoisted from a loop. */
2689 if (RTX_UNCHANGING_P (object))
2690 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2692 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
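/* In other words, the call built above is the tree equivalent of

     memset (object, 0, size)    [TARGET_MEM_FUNCTIONS]
     bzero (object, size)        [otherwise]

   with the arguments consed onto ARG_LIST in reverse order.  */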
2695 /* A subroutine of clear_storage_via_libcall. Create the tree node
2696 for the function we use for block clears. The first time FOR_CALL
2697 is true, we call assemble_external. */
2699 static GTY(()) tree block_clear_fn;
2702 init_block_clear_fn (const char *asmspec)
2704 if (!block_clear_fn)
2708 if (TARGET_MEM_FUNCTIONS)
2710 fn = get_identifier ("memset");
2711 args = build_function_type_list (ptr_type_node, ptr_type_node,
					   integer_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
2717 fn = get_identifier ("bzero");
2718 args = build_function_type_list (void_type_node, ptr_type_node,
					   unsigned_type_node, NULL_TREE);
	}
2722 fn = build_decl (FUNCTION_DECL, fn, args);
2723 DECL_EXTERNAL (fn) = 1;
2724 TREE_PUBLIC (fn) = 1;
2725 DECL_ARTIFICIAL (fn) = 1;
2726 TREE_NOTHROW (fn) = 1;
2728 block_clear_fn = fn;
  if (asmspec)
    {
      SET_DECL_RTL (block_clear_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
    }
static tree
clear_storage_libcall_fn (int for_call)
2741 static bool emitted_extern;
2743 if (!block_clear_fn)
2744 init_block_clear_fn (NULL);
2746 if (for_call && !emitted_extern)
2748 emitted_extern = true;
2749 make_decl_rtl (block_clear_fn, NULL);
2750 assemble_external (block_clear_fn);
2753 return block_clear_fn;
2756 /* Generate code to copy Y into X.
2757 Both Y and X must have the same mode, except that
2758 Y can be a constant with VOIDmode.
2759 This mode cannot be BLKmode; use emit_block_move for that.
2761 Return the last instruction emitted. */
2764 emit_move_insn (rtx x, rtx y)
2766 enum machine_mode mode = GET_MODE (x);
2767 rtx y_cst = NULL_RTX;
2770 x = protect_from_queue (x, 1);
2771 y = protect_from_queue (y, 0);
  if (mode == BLKmode
      || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();
2776 /* Never force constant_p_rtx to memory. */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y))
    {
      if (optimize
	  && SCALAR_FLOAT_MODE_P (GET_MODE (x))
	  && (last_insn = compress_float_constant (x, y)))
	return last_insn;

      y_cst = y;
2788 if (!LEGITIMATE_CONSTANT_P (y))
2790 y = force_const_mem (mode, y);
2792 /* If the target's cannot_force_const_mem prevented the spill,
2793 assume that the target's move expanders will also take care
	     of the non-legitimate constant.  */
	  if (!y)
	    y = y_cst;
  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
2802 if (GET_CODE (x) == MEM
2803 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2804 && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2807 x = validize_mem (x);
2809 if (GET_CODE (y) == MEM
2810 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2813 y = validize_mem (y);
  if (mode == BLKmode)
    abort ();
2818 last_insn = emit_move_insn_1 (x, y);
2820 if (y_cst && GET_CODE (x) == REG
2821 && (set = single_set (last_insn)) != NULL_RTX
2822 && SET_DEST (set) == x
2823 && ! rtx_equal_p (y_cst, SET_SRC (set)))
    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);

  return last_insn;
2829 /* Low level part of emit_move_insn.
2830 Called just like emit_move_insn, but assumes X and Y
2831 are basically valid. */
2834 emit_move_insn_1 (rtx x, rtx y)
2836 enum machine_mode mode = GET_MODE (x);
2837 enum machine_mode submode;
2838 enum mode_class class = GET_MODE_CLASS (mode);
2840 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2843 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2847 /* Expand complex moves by moving real part and imag part, if possible. */
2848 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2849 && BLKmode != (submode = GET_MODE_INNER (mode))
2850 && (mov_optab->handlers[(int) submode].insn_code
2851 != CODE_FOR_nothing))
2853 /* Don't split destination if it is a stack push. */
2854 int stack = push_operand (x, GET_MODE (x));
2856 #ifdef PUSH_ROUNDING
2857 /* In case we output to the stack, but the size is smaller than the
2858 machine can push exactly, we need to use move instructions. */
      if (stack
	  && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2861 != GET_MODE_SIZE (submode)))
2864 HOST_WIDE_INT offset1, offset2;
2866 /* Do not use anti_adjust_stack, since we don't want to update
2867 stack_pointer_delta. */
2868 temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
				 (PUSH_ROUNDING
				  (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2880 if (temp != stack_pointer_rtx)
2881 emit_move_insn (stack_pointer_rtx, temp);
#ifdef STACK_GROWS_DOWNWARD
	  offset1 = 0;
	  offset2 = GET_MODE_SIZE (submode);
#else
	  offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
	  offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
		     + GET_MODE_SIZE (submode));
#endif
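	  /* Worked example, assuming PUSH_ROUNDING is the identity:
	     pushing a DCmode value with DFmode parts moves the stack
	     pointer by 16.  On a downward-growing stack the parts land
	     at offsets 0 and 8 from the new stack pointer; otherwise at
	     -16 and -8 from the adjusted one.  */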
2892 emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset1))),
2896 gen_realpart (submode, y));
2897 emit_move_insn (change_address (x, submode,
					  gen_rtx_PLUS (Pmode,
							stack_pointer_rtx,
							GEN_INT (offset2))),
2901 gen_imagpart (submode, y));
2905 /* If this is a stack, push the highpart first, so it
2906 will be in the argument order.
2908 In that case, change_address is used only to convert
2909 the mode, not to change the address. */
2912 /* Note that the real part always precedes the imag part in memory
2913 regardless of machine's endianness. */
2914 #ifdef STACK_GROWS_DOWNWARD
2915 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2916 gen_imagpart (submode, y));
2917 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2918 gen_realpart (submode, y));
2920 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2921 gen_realpart (submode, y));
2922 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2923 gen_imagpart (submode, y));
2928 rtx realpart_x, realpart_y;
2929 rtx imagpart_x, imagpart_y;
2931 /* If this is a complex value with each part being smaller than a
2932 word, the usual calling sequence will likely pack the pieces into
2933 a single register. Unfortunately, SUBREG of hard registers only
2934 deals in terms of words, so we have a problem converting input
2935 arguments to the CONCAT of two registers that is used elsewhere
2936 for complex values. If this is before reload, we can copy it into
2937 memory and reload. FIXME, we should see about using extract and
2938 insert on integer registers, but complex short and complex char
2939 variables should be rarely used. */
2940 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2941 && (reload_in_progress | reload_completed) == 0)
	  int packed_dest_p
	    = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	  int packed_src_p
	    = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2948 if (packed_dest_p || packed_src_p)
2950 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2951 ? MODE_FLOAT : MODE_INT);
2953 enum machine_mode reg_mode
2954 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2956 if (reg_mode != BLKmode)
2958 rtx mem = assign_stack_temp (reg_mode,
2959 GET_MODE_SIZE (mode), 0);
2960 rtx cmem = adjust_address (mem, mode, 0);
		  cfun->cannot_inline
		    = N_("function using short complex types cannot be inline");
		  if (packed_dest_p)
		    {
		      rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2969 emit_move_insn_1 (cmem, y);
2970 return emit_move_insn_1 (sreg, mem);
		    }
		  else
		    {
		      rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2976 emit_move_insn_1 (mem, sreg);
2977 return emit_move_insn_1 (x, cmem);
2983 realpart_x = gen_realpart (submode, x);
2984 realpart_y = gen_realpart (submode, y);
2985 imagpart_x = gen_imagpart (submode, x);
2986 imagpart_y = gen_imagpart (submode, y);
2988 /* Show the output dies here. This is necessary for SUBREGs
2989 of pseudos since we cannot track their lifetimes correctly;
2990 hard regs shouldn't appear here except as return values.
2991 We never want to emit such a clobber after reload. */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
2994 && (GET_CODE (realpart_x) == SUBREG
2995 || GET_CODE (imagpart_x) == SUBREG))
2996 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2998 emit_move_insn (realpart_x, realpart_y);
2999 emit_move_insn (imagpart_x, imagpart_y);
3002 return get_last_insn ();
3005 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3006 find a mode to do it in. If we have a movcc, use it. Otherwise,
3007 find the MODE_INT mode of the same width. */
3008 else if (GET_MODE_CLASS (mode) == MODE_CC
3009 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3011 enum insn_code insn_code;
3012 enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
	  && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
	tmode = CCmode;
      else
3019 for (tmode = QImode; tmode != VOIDmode;
3020 tmode = GET_MODE_WIDER_MODE (tmode))
	  if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
	    break;

      if (tmode == VOIDmode)
	abort ();
3027 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3028 may call change_address which is not appropriate if we were
3029 called when a reload was in progress. We don't have to worry
3030 about changing the address since the size in bytes is supposed to
3031 be the same. Copy the MEM to change the mode and move any
3032 substitutions from the old MEM to the new one. */
3034 if (reload_in_progress)
3036 x = gen_lowpart_common (tmode, x1);
3037 if (x == 0 && GET_CODE (x1) == MEM)
3039 x = adjust_address_nv (x1, tmode, 0);
3040 copy_replacements (x1, x);
3043 y = gen_lowpart_common (tmode, y1);
3044 if (y == 0 && GET_CODE (y1) == MEM)
3046 y = adjust_address_nv (y1, tmode, 0);
3047 copy_replacements (y1, y);
	}
      else
	{
	  x = gen_lowpart (tmode, x);
	  y = gen_lowpart (tmode, y);
	}
3056 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3057 return emit_insn (GEN_FCN (insn_code) (x, y));
3060 /* Try using a move pattern for the corresponding integer mode. This is
3061 only safe when simplify_subreg can convert MODE constants into integer
3062 constants. At present, it can only do this reliably if the value
3063 fits within a HOST_WIDE_INT. */
3064 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3065 && (submode = int_mode_for_mode (mode)) != BLKmode
3066 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3067 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3068 (simplify_gen_subreg (submode, x, mode, 0),
3069 simplify_gen_subreg (submode, y, mode, 0)));
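  /* For example, an SFmode constant move on a host with 32-bit (or
     wider) HOST_WIDE_INT can be rewritten this way:
     int_mode_for_mode (SFmode) yields SImode, and simplify_gen_subreg
     folds the CONST_DOUBLE into an equivalent CONST_INT bit pattern
     for the SImode move pattern.  */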
3071 /* This will handle any multi-word or full-word mode that lacks a move_insn
3072 pattern. However, you will get better code if you define such patterns,
3073 even if they must turn into multiple assembler instructions. */
3074 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3081 #ifdef PUSH_ROUNDING
3083 /* If X is a push on the stack, do the push now and replace
3084 X with a reference to the stack pointer. */
3085 if (push_operand (x, GET_MODE (x)))
3090 /* Do not use anti_adjust_stack, since we don't want to update
3091 stack_pointer_delta. */
3092 temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
				 (PUSH_ROUNDING
				  (GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3104 if (temp != stack_pointer_rtx)
3105 emit_move_insn (stack_pointer_rtx, temp);
3107 code = GET_CODE (XEXP (x, 0));
3109 /* Just hope that small offsets off SP are OK. */
3110 if (code == POST_INC)
3111 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3112 GEN_INT (-((HOST_WIDE_INT)
3113 GET_MODE_SIZE (GET_MODE (x)))));
3114 else if (code == POST_DEC)
3115 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3116 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  else
	    temp = stack_pointer_rtx;
3120 x = change_address (x, VOIDmode, temp);
3124 /* If we are in reload, see if either operand is a MEM whose address
3125 is scheduled for replacement. */
3126 if (reload_in_progress && GET_CODE (x) == MEM
3127 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3128 x = replace_equiv_address_nv (x, inner);
3129 if (reload_in_progress && GET_CODE (y) == MEM
3130 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3131 y = replace_equiv_address_nv (y, inner);
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
3140 rtx xpart = operand_subword (x, i, 1, mode);
3141 rtx ypart = operand_subword (y, i, 1, mode);
3143 /* If we can't get a part of Y, put Y into memory if it is a
3144 constant. Otherwise, force it into a register. If we still
3145 can't get a part of Y, abort. */
3146 if (ypart == 0 && CONSTANT_P (y))
3148 y = force_const_mem (mode, y);
3149 ypart = operand_subword (y, i, 1, mode);
3151 else if (ypart == 0)
3152 ypart = operand_subword_force (y, i, mode);
	  if (xpart == 0 || ypart == 0)
	    abort ();
3157 need_clobber |= (GET_CODE (xpart) == SUBREG);
3159 last_insn = emit_move_insn (xpart, ypart);
3165 /* Show the output dies here. This is necessary for SUBREGs
3166 of pseudos since we cannot track their lifetimes correctly;
3167 hard regs shouldn't appear here except as return values.
3168 We never want to emit such a clobber after reload. */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
3171 && need_clobber != 0)
3172 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3182 /* If Y is representable exactly in a narrower mode, and the target can
3183 perform the extension directly from constant or memory, then emit the
3184 move as an extension. */
static rtx
compress_float_constant (rtx x, rtx y)
3189 enum machine_mode dstmode = GET_MODE (x);
3190 enum machine_mode orig_srcmode = GET_MODE (y);
3191 enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3196 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3197 srcmode != orig_srcmode;
3198 srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;
3203 /* Skip if the target can't extend this way. */
3204 ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;
3212 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	}
3221 else if (float_extend_from_mem[dstmode][srcmode])
	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
	continue;
3226 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3227 last_insn = get_last_insn ();
3229 if (GET_CODE (x) == REG)
	set_unique_reg_note (last_insn, REG_EQUAL, y);

      return last_insn;
    }

  return NULL_RTX;
3238 /* Pushing data onto the stack. */
3240 /* Push a block of length SIZE (perhaps variable)
3241 and return an rtx to address the beginning of the block.
3242 Note that it is not possible for the value returned to be a QUEUED.
3243 The value may be virtual_outgoing_args_rtx.
3245 EXTRA is the number of bytes of padding to push in addition to SIZE.
3246 BELOW nonzero means this padding comes at low addresses;
3247 otherwise, the padding comes at high addresses. */
3250 push_block (rtx size, int extra, int below)
3254 size = convert_modes (Pmode, ptr_mode, size, 1);
3255 if (CONSTANT_P (size))
3256 anti_adjust_stack (plus_constant (size, extra));
3257 else if (GET_CODE (size) == REG && extra == 0)
3258 anti_adjust_stack (size);
3261 temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3264 temp, 0, OPTAB_LIB_WIDEN);
3265 anti_adjust_stack (temp);
#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
3275 if (extra != 0 && below)
3276 temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
3281 temp = plus_constant (virtual_outgoing_args_rtx,
3282 -INTVAL (size) - (below ? 0 : extra));
3283 else if (extra != 0 && !below)
3284 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3285 negate_rtx (Pmode, plus_constant (size, extra)));
3287 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3288 negate_rtx (Pmode, size));
3291 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3294 #ifdef PUSH_ROUNDING
3296 /* Emit single push insn. */
3299 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3302 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3304 enum insn_code icode;
3305 insn_operand_predicate_fn pred;
3307 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing the push operation to the move expander.  */
3310 icode = push_optab->handlers[(int) mode].insn_code;
3311 if (icode != CODE_FOR_nothing)
3313 if (((pred = insn_data[(int) icode].operand[0].predicate)
3314 && !((*pred) (x, mode))))
3315 x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
3319 if (GET_MODE_SIZE (mode) == rounded_size)
3320 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3321 /* If we are to pad downward, adjust the stack pointer first and
3322 then store X into the stack location using an offset. This is
     because emit_move_insn does not know how to pad; it does not have
     access to type.  */
3325 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3327 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3328 HOST_WIDE_INT offset;
3330 emit_move_insn (stack_pointer_rtx,
3331 expand_binop (Pmode,
3332 #ifdef STACK_GROWS_DOWNWARD
3338 GEN_INT (rounded_size),
3339 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3341 offset = (HOST_WIDE_INT) padding_size;
3342 #ifdef STACK_GROWS_DOWNWARD
3343 if (STACK_PUSH_CODE == POST_DEC)
	/* We have already decremented the stack pointer, so get the
	   previous value.  */
	offset += (HOST_WIDE_INT) rounded_size;
#else
      if (STACK_PUSH_CODE == POST_INC)
	/* We have already incremented the stack pointer, so get the
	   previous value.  */
	offset -= (HOST_WIDE_INT) rounded_size;
#endif
3353 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
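      /* Worked example, assuming a downward-growing stack: a 5-byte
	 mode that PUSH_ROUNDING rounds to 8 gives padding_size == 3, so
	 after the explicit 8-byte stack adjustment above the value is
	 stored at sp + 3 with the padding below it.  With POST_DEC,
	 rounded_size is added back (offset == 11) so the address is
	 expressed relative to the pre-adjustment stack pointer.  */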
3357 #ifdef STACK_GROWS_DOWNWARD
3358 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3359 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3360 GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC.  */
3363 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3364 GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3369 dest = gen_rtx_MEM (mode, dest_addr);
3373 set_mem_attributes (dest, type, 1);
3375 if (flag_optimize_sibling_calls)
3376 /* Function incoming arguments may overlap with sibling call
3377 outgoing arguments and we cannot allow reordering of reads
3378 from function arguments with stores to outgoing arguments
3379 of sibling calls. */
3380 set_mem_alias_set (dest, 0);
3382 emit_move_insn (dest, x);
3386 /* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
3390 SIZE is an rtx for the size of data to be copied (in bytes),
3391 needed only if X is BLKmode.
3393 ALIGN (in bits) is maximum alignment we can assume.
3395 If PARTIAL and REG are both nonzero, then copy that many of the first
3396 words of X into registers starting with REG, and push the rest of X.
3397 The amount of space pushed is decreased by PARTIAL words,
3398 rounded *down* to a multiple of PARM_BOUNDARY.
3399 REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.
3404 EXTRA is the amount in bytes of extra space to leave next to this arg.
3405 This is ignored if an argument block has already been allocated.
3407 On a machine that lacks real push insns, ARGS_ADDR is the address of
3408 the bottom of the argument block for this call. We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.
3412 ARGS_SO_FAR is the size of args previously pushed for this call.
3414 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3415 for arguments passed in registers. If nonzero, it will be the number
3416 of bytes required. */
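/* For example, with UNITS_PER_WORD == 4 and PARM_BOUNDARY == 64, a
   PARTIAL of 3 words (12 bytes) reduces the pushed size by only 8
   bytes, since 12 is rounded *down* to the 8-byte parm boundary.  */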
3419 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3420 unsigned int align, int partial, rtx reg, int extra,
3421 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3425 enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif
3432 /* Decide where to pad the argument: `downward' for below,
3433 `upward' for above, or `none' for don't pad it.
3434 Default is below for small data on big-endian machines; else above. */
3435 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
3439 if (STACK_PUSH_CODE == POST_DEC)
3440 if (where_pad != none)
3441 where_pad = (where_pad == downward ? upward : downward);
3443 xinner = x = protect_from_queue (x, 0);
3445 if (mode == BLKmode)
3447 /* Copy a block into the stack, entirely or partially. */
3450 int used = partial * UNITS_PER_WORD;
3454 if (reg && GET_CODE (reg) == PARALLEL)
3456 /* Use the size of the elt to compute offset. */
3457 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3458 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3459 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
	}
      else
	offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3469 /* USED is now the # of bytes we need not copy to the stack
3470 because registers will take care of them. */
      if (used)
	xinner = adjust_address (xinner, BLKmode, used);
3475 /* If the partial register-part of the arg counts in its stack size,
3476 skip the part of stack space corresponding to the registers.
3477 Otherwise, start copying to the beginning of the stack space,
3478 by setting SKIP to 0. */
3479 skip = (reg_parm_stack_space == 0) ? 0 : used;
3481 #ifdef PUSH_ROUNDING
3482 /* Do it with several push insns if that doesn't take lots of insns
3483 and if there is no difficulty with push insns that skip bytes
3484 on the stack for alignment purposes. */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && MEM_ALIGN (xinner) >= align
3490 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3491 /* Here we avoid the case of a structure whose weak alignment
3492 forces many pushes of a small amount of data,
3493 and such small pushes do rounding that causes trouble. */
3494 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3495 || align >= BIGGEST_ALIGNMENT
3496 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3497 == (align / BITS_PER_UNIT)))
3498 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3500 /* Push padding now if padding above and stack grows down,
3501 or if padding below and stack grows up.
3502 But if space already allocated, this has already been done. */
3503 if (extra && args_addr == 0
3504 && where_pad != none && where_pad != stack_direction)
3505 anti_adjust_stack (GEN_INT (extra));
3507 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3510 #endif /* PUSH_ROUNDING */
3514 /* Otherwise make space on the stack and copy the data
3515 to the address of that space. */
3517 /* Deduct words put into registers from the size we must copy. */
3520 if (GET_CODE (size) == CONST_INT)
3521 size = GEN_INT (INTVAL (size) - used);
3523 size = expand_binop (GET_MODE (size), sub_optab, size,
			       GEN_INT (used), NULL_RTX, 0,
			       OPTAB_LIB_WIDEN);
3528 /* Get the address of the stack space.
3529 In this case, we do not deal with EXTRA separately.
3530 A single stack adjust will do. */
3533 temp = push_block (size, extra, where_pad == downward);
3536 else if (GET_CODE (args_so_far) == CONST_INT)
3537 temp = memory_address (BLKmode,
3538 plus_constant (args_addr,
3539 skip + INTVAL (args_so_far)));
3541 temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));
3547 if (!ACCUMULATE_OUTGOING_ARGS)
3549 /* If the source is referenced relative to the stack pointer,
3550 copy it to another register to stabilize it. We do not need
3551 to do this if we know that we won't be changing sp. */
3553 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3554 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3555 temp = copy_to_reg (temp);
3558 target = gen_rtx_MEM (BLKmode, temp);
	  if (type != 0)
	    {
	      set_mem_attributes (target, type, 1);
3563 /* Function incoming arguments may overlap with sibling call
3564 outgoing arguments and we cannot allow reordering of reads
3565 from function arguments with stores to outgoing arguments
3566 of sibling calls. */
	      set_mem_alias_set (target, 0);
	    }
3570 /* ALIGN may well be better aligned than TYPE, e.g. due to
3571 PARM_BOUNDARY. Assume the caller isn't lying. */
3572 set_mem_align (target, align);
3574 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3577 else if (partial > 0)
3579 /* Scalar partly in registers. */
3581 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3584 /* # words of start of argument
3585 that we must make space for but need not store. */
3586 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3587 int args_offset = INTVAL (args_so_far);
3590 /* Push padding now if padding above and stack grows down,
3591 or if padding below and stack grows up.
3592 But if space already allocated, this has already been done. */
3593 if (extra && args_addr == 0
3594 && where_pad != none && where_pad != stack_direction)
3595 anti_adjust_stack (GEN_INT (extra));
3597 /* If we make space by pushing it, we might as well push
3598 the real data. Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;
3603 /* Now NOT_STACK gets the number of words that we don't need to
3604 allocate on the stack. */
3605 not_stack = partial - offset;
3607 /* If the partial register-part of the arg counts in its stack size,
3608 skip the part of stack space corresponding to the registers.
3609 Otherwise, start copying to the beginning of the stack space,
3610 by setting SKIP to 0. */
3611 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3613 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3614 x = validize_mem (force_const_mem (mode, x));
3616 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3617 SUBREGs of such registers are not allowed. */
3618 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3619 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3620 x = copy_to_reg (x);
3622 /* Loop over all the words allocated on the stack for this arg. */
3623 /* We can do it by words, because any scalar bigger than a word
3624 has a size a multiple of a word. */
3625 #ifndef PUSH_ARGS_REVERSED
3626 for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
3630 if (i >= not_stack + offset)
3631 emit_push_insn (operand_subword_force (x, i, mode),
3632 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			    GEN_INT (args_offset + ((i - not_stack + skip)
						    * UNITS_PER_WORD)),
3636 reg_parm_stack_space, alignment_pad);
3643 /* Push padding now if padding above and stack grows down,
3644 or if padding below and stack grows up.
3645 But if space already allocated, this has already been done. */
3646 if (extra && args_addr == 0
3647 && where_pad != none && where_pad != stack_direction)
3648 anti_adjust_stack (GEN_INT (extra));
3650 #ifdef PUSH_ROUNDING
3651 if (args_addr == 0 && PUSH_ARGS)
3652 emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
3664 dest = gen_rtx_MEM (mode, addr);
	  if (type != 0)
	    {
	      set_mem_attributes (dest, type, 1);
3668 /* Function incoming arguments may overlap with sibling call
3669 outgoing arguments and we cannot allow reordering of reads
3670 from function arguments with stores to outgoing arguments
3671 of sibling calls. */
	      set_mem_alias_set (dest, 0);
	    }
3675 emit_move_insn (dest, x);
3679 /* If part should go in registers, copy that part
3680 into the appropriate registers. Do this now, at the end,
3681 since mem-to-mem copies above may do function calls. */
3682 if (partial > 0 && reg != 0)
3684 /* Handle calls that pass values in multiple non-contiguous locations.
3685 The Irix 6 ABI has examples of this. */
3686 if (GET_CODE (reg) == PARALLEL)
3687 emit_group_load (reg, x, type, -1);
3689 move_block_to_reg (REGNO (reg), x, partial, mode);
3692 if (extra && args_addr == 0 && where_pad == stack_direction)
3693 anti_adjust_stack (GEN_INT (extra));
3695 if (alignment_pad && args_addr == 0)
3696 anti_adjust_stack (alignment_pad);
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */
static rtx
get_subtarget (rtx x)
{
  return ((x == 0
3706 /* Only registers can be subtargets. */
3707 || GET_CODE (x) != REG
3708 /* If the register is readonly, it can't be set more than once. */
3709 || RTX_UNCHANGING_P (x)
3710 /* Don't use hard regs to avoid extending their life. */
3711 || REGNO (x) < FIRST_PSEUDO_REGISTER
3712 /* Avoid subtargets inside loops,
3713 since they hide some invariant expressions. */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}
3718 /* Expand an assignment that stores the value of FROM into TO.
3719 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3720 (This may contain a QUEUED rtx;
3721 if the value is constant, this rtx is a constant.)
3722 Otherwise, the returned value is NULL_RTX. */
3725 expand_assignment (tree to, tree from, int want_value)
3730 /* Don't crash if the lhs of the assignment was erroneous. */
3732 if (TREE_CODE (to) == ERROR_MARK)
3734 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3735 return want_value ? result : NULL_RTX;
3738 /* Assignment of a structure component needs special treatment
3739 if the structure component's rtx is not simply a MEM.
3740 Assignment of an array element at a constant index, and assignment of
   an array element in an unaligned packed structure field, has the same
   problem.  */
3744 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3745 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3746 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3748 enum machine_mode mode1;
3749 HOST_WIDE_INT bitsize, bitpos;
3757 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3758 &unsignedp, &volatilep);
3760 /* If we are going to use store_bit_field and extract_bit_field,
3761 make sure to_rtx will be safe for multiple use. */
3763 if (mode1 == VOIDmode && want_value)
3764 tem = stabilize_reference (tem);
3766 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3770 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
3775 #ifdef POINTERS_EXTEND_UNSIGNED
3776 if (GET_MODE (offset_rtx) != Pmode)
3777 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3779 if (GET_MODE (offset_rtx) != ptr_mode)
3780 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3783 /* A constant address in TO_RTX can have VOIDmode, we must not try
3784 to call force_reg for that case. Avoid that case. */
3785 if (GET_CODE (to_rtx) == MEM
3786 && GET_MODE (to_rtx) == BLKmode
3787 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
3790 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3791 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3793 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3797 to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_type (TREE_TYPE (to),
								 offset));
3802 if (GET_CODE (to_rtx) == MEM)
3804 /* If the field is at offset zero, we could have been given the
3805 DECL_RTX of the parent struct. Don't munge it. */
3806 to_rtx = shallow_copy_rtx (to_rtx);
3808 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3811 /* Deal with volatile and readonly fields. The former is only done
3812 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3813 if (volatilep && GET_CODE (to_rtx) == MEM)
3815 if (to_rtx == orig_to_rtx)
3816 to_rtx = copy_rtx (to_rtx);
3817 MEM_VOLATILE_P (to_rtx) = 1;
3820 if (TREE_CODE (to) == COMPONENT_REF
3821 && TREE_READONLY (TREE_OPERAND (to, 1))
3822 /* We can't assert that a MEM won't be set more than once
3823 if the component is not addressable because another
3824 non-addressable component may be referenced by the same MEM. */
3825 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3827 if (to_rtx == orig_to_rtx)
3828 to_rtx = copy_rtx (to_rtx);
3829 RTX_UNCHANGING_P (to_rtx) = 1;
3832 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3834 if (to_rtx == orig_to_rtx)
3835 to_rtx = copy_rtx (to_rtx);
3836 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3839 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast for HPUX compiler.  */
3842 ? ((enum machine_mode)
3843 TYPE_MODE (TREE_TYPE (to)))
			     : VOIDmode),
			    unsignedp, TREE_TYPE (tem), get_alias_set (to));
3847 preserve_temp_slots (result);
3851 /* If the value is meaningful, convert RESULT to the proper mode.
3852 Otherwise, return nothing. */
3853 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3854 TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
3860 /* If the rhs is a function call and its value is not an aggregate,
3861 call the function before we start to compute the lhs.
3862 This is needed for correct code for cases such as
3863 val = setjmp (buf) on machines where reference to val
3864 requires loading up part of an address in a separate insn.
3866 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3867 since it might be a promoted variable where the zero- or sign- extension
3868 needs to be done. Handling this in the normal way is safe because no
3869 computation is done before the call. */
3870 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3871 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3872 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3873 && GET_CODE (DECL_RTL (to)) == REG))
3878 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3880 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3882 /* Handle calls that return values in multiple non-contiguous locations.
3883 The Irix 6 ABI has examples of this. */
3884 if (GET_CODE (to_rtx) == PARALLEL)
3885 emit_group_load (to_rtx, value, TREE_TYPE (from),
3886 int_size_in_bytes (TREE_TYPE (from)));
3887 else if (GET_MODE (to_rtx) == BLKmode)
3888 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3891 if (POINTER_TYPE_P (TREE_TYPE (to)))
3892 value = convert_memory_address (GET_MODE (to_rtx), value);
3893 emit_move_insn (to_rtx, value);
3895 preserve_temp_slots (to_rtx);
3898 return want_value ? to_rtx : NULL_RTX;
3901 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3902 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3907 /* Don't move directly into a return register. */
3908 if (TREE_CODE (to) == RESULT_DECL
3909 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3914 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3916 if (GET_CODE (to_rtx) == PARALLEL)
3917 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3918 int_size_in_bytes (TREE_TYPE (from)));
3920 emit_move_insn (to_rtx, temp);
3922 preserve_temp_slots (to_rtx);
3925 return want_value ? to_rtx : NULL_RTX;
3928 /* In case we are returning the contents of an object which overlaps
3929 the place the value is being stored, use a safe function when copying
3930 a value through a pointer into a structure value return block. */
3931 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3932 && current_function_returns_struct
3933 && !current_function_returns_pcc_struct)
3938 size = expr_size (from);
3939 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3941 if (TARGET_MEM_FUNCTIONS)
3942 emit_library_call (memmove_libfunc, LCT_NORMAL,
3943 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3944 XEXP (from_rtx, 0), Pmode,
3945 convert_to_mode (TYPE_MODE (sizetype),
3946 size, TREE_UNSIGNED (sizetype)),
3947 TYPE_MODE (sizetype));
3949 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3950 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3951 XEXP (to_rtx, 0), Pmode,
3952 convert_to_mode (TYPE_MODE (integer_type_node),
					    size,
					    TREE_UNSIGNED (integer_type_node)),
3955 TYPE_MODE (integer_type_node));
3957 preserve_temp_slots (to_rtx);
3960 return want_value ? to_rtx : NULL_RTX;
3963 /* Compute FROM and store the value in the rtx we got. */
3966 result = store_expr (from, to_rtx, want_value);
3967 preserve_temp_slots (result);
3970 return want_value ? result : NULL_RTX;
3973 /* Generate code for computing expression EXP,
3974 and storing the value into TARGET.
3975 TARGET may contain a QUEUED rtx.
3977 If WANT_VALUE & 1 is nonzero, return a copy of the value
3978 not in TARGET, so that we can be sure to use the proper
3979 value in a containing expression even if TARGET has something
3980 else stored in it. If possible, we copy the value through a pseudo
3981 and return that pseudo. Or, if the value is constant, we try to
3982 return the constant. In some cases, we return a pseudo
3983 copied *from* TARGET.
3985 If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
3988 assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?
3992 If WANT_VALUE & 1 is 0, we return NULL, to make sure
3993 to catch quickly any cases where the caller uses the value
3994 and fails to set WANT_VALUE.
3996 If WANT_VALUE & 2 is set, this is a store into a call param on the
3997 stack, and block moves may need to be treated specially. */
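/* Summarizing the flag bits: WANT_VALUE == 0 just stores, WANT_VALUE == 1
   also returns a (possibly copied) rtx for the stored value,
   WANT_VALUE == 2 marks a store into a call parameter on the stack with
   no value wanted, and WANT_VALUE == 3 combines the two.  */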
4000 store_expr (tree exp, rtx target, int want_value)
4003 rtx alt_rtl = NULL_RTX;
4004 int dont_return_target = 0;
4005 int dont_store_target = 0;
4007 if (VOID_TYPE_P (TREE_TYPE (exp)))
4009 /* C++ can generate ?: expressions with a throw expression in one
4010 branch and an rvalue in the other. Here, we resolve attempts to
4011 store the throw expression's nonexistent result. */
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
4017 if (TREE_CODE (exp) == COMPOUND_EXPR)
4019 /* Perform first part of compound expression, then assign from second
4021 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      emit_queue ();
4024 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4026 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4028 /* For conditional expression, get safe form of the target. Then
4029 test the condition, doing the appropriate assignment on either
4030 side. This avoids the creation of unnecessary temporaries.
4031 For non-BLKmode, it is more efficient not to do this. */
4033 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4036 target = protect_from_queue (target, 1);
4038 do_pending_stack_adjust ();
4040 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4041 start_cleanup_deferral ();
4042 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4043 end_cleanup_deferral ();
4045 emit_jump_insn (gen_jump (lab2));
      emit_label (lab1);
      start_cleanup_deferral ();
4049 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4050 end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
4057 else if (queued_subexp_p (target))
4058 /* If target contains a postincrement, let's not risk
4059 using it as the place to generate the rhs. */
4061 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4063 /* Expand EXP into a new pseudo. */
4064 temp = gen_reg_rtx (GET_MODE (target));
4065 temp = expand_expr (exp, temp, GET_MODE (target),
4067 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4070 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4072 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4074 /* If target is volatile, ANSI requires accessing the value
4075 *from* the target, if it is accessed. So make that happen.
4076 In no case return the target itself. */
4077 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4078 dont_return_target = 1;
4080 else if ((want_value & 1) != 0
4081 && GET_CODE (target) == MEM
4082 && ! MEM_VOLATILE_P (target)
4083 && GET_MODE (target) != BLKmode)
4084 /* If target is in memory and caller wants value in a register instead,
4085 arrange that. Pass TARGET as target for expand_expr so that,
4086 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4087 We know expand_expr will not use the target in that case.
4088 Don't do this if TARGET is volatile because we are supposed
4089 to write it and then read it. */
4091 temp = expand_expr (exp, target, GET_MODE (target),
4092 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4093 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4095 /* If TEMP is already in the desired TARGET, only copy it from
4096 memory and don't store it there again. */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
4099 && ! side_effects_p (temp) && ! side_effects_p (target)))
4100 dont_store_target = 1;
4101 temp = copy_to_reg (temp);
4103 dont_return_target = 1;
4105 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4106 /* If this is a scalar in a register that is stored in a wider mode
4107 than the declared mode, compute the result into its declared mode
4108 and then convert to the wider mode. Our value is the computed
4109 expression. */
4111 rtx inner_target = 0;
4113 /* If we don't want a value, we can do the conversion inside EXP,
4114 which will often result in some optimizations. Do the conversion
4115 in two steps: first change the signedness, if needed, then
4116 the extend. But don't do this if the type of EXP is a subtype
4117 of something else since then the conversion might involve
4118 more than just converting modes. */
4119 if ((want_value & 1) == 0
4120 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4121 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4123 if (TREE_UNSIGNED (TREE_TYPE (exp))
4124 != SUBREG_PROMOTED_UNSIGNED_P (target))
4126 ((*lang_hooks.types.signed_or_unsigned_type)
4127 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4129 exp = convert ((*lang_hooks.types.type_for_mode)
4130 (GET_MODE (SUBREG_REG (target)),
4131 SUBREG_PROMOTED_UNSIGNED_P (target)),
4134 inner_target = SUBREG_REG (target);
4137 temp = expand_expr (exp, inner_target, VOIDmode,
4138 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4140 /* If TEMP is a MEM and we want a result value, make the access
4141 now so it gets done only once. Strictly speaking, this is
4142 only necessary if the MEM is volatile, or if the address
4143 overlaps TARGET. But not performing the load twice also
4144 reduces the amount of rtl we generate and then have to CSE. */
4145 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4146 temp = copy_to_reg (temp);
4148 /* If TEMP is a VOIDmode constant, use convert_modes to make
4149 sure that we properly convert it. */
4150 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4152 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4153 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4154 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4155 GET_MODE (target), temp,
4156 SUBREG_PROMOTED_UNSIGNED_P (target));
4159 convert_move (SUBREG_REG (target), temp,
4160 SUBREG_PROMOTED_UNSIGNED_P (target));
4162 /* If we promoted a constant, change the mode back down to match
4163 target. Otherwise, the caller might get confused by a result whose
4164 mode is larger than expected. */
4166 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4168 if (GET_MODE (temp) != VOIDmode)
4170 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4171 SUBREG_PROMOTED_VAR_P (temp) = 1;
4172 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4173 SUBREG_PROMOTED_UNSIGNED_P (target));
4176 temp = convert_modes (GET_MODE (target),
4177 GET_MODE (SUBREG_REG (target)),
4178 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4181 return want_value & 1 ? temp : NULL_RTX;
4185 temp = expand_expr_real (exp, target, GET_MODE (target),
4187 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4189 /* Return TARGET if it's a specified hardware register.
4190 If TARGET is a volatile mem ref, either return TARGET
4191 or return a reg copied *from* TARGET; ANSI requires this.
4193 Otherwise, if TEMP is not TARGET, return TEMP
4194 if it is constant (for efficiency),
4195 or if we really want the correct value. */
4196 if (!(target && GET_CODE (target) == REG
4197 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4198 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4199 && ! rtx_equal_p (temp, target)
4200 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4201 dont_return_target = 1;
4204 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4205 the same as that of TARGET, adjust the constant. This is needed, for
4206 example, in case it is a CONST_DOUBLE and we want only a word-sized
4207 value. */
4208 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4209 && TREE_CODE (exp) != ERROR_MARK
4210 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4211 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4212 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4214 /* If value was not generated in the target, store it there.
4215 Convert the value to TARGET's type first if necessary.
4216 If TEMP and TARGET compare equal according to rtx_equal_p, but
4217 one or both of them are volatile memory refs, we have to distinguish
4218 two cases:
4219 - expand_expr has used TARGET. In this case, we must not generate
4220 another copy. This can be detected by TARGET being equal according
4221 to == .
4222 - expand_expr has not used TARGET - that means that the source just
4223 happens to have the same RTX form. Since temp will have been created
4224 by expand_expr, it will compare unequal according to == .
4225 We must generate a copy in this case, to reach the correct number
4226 of volatile memory references. */
4228 if ((! rtx_equal_p (temp, target)
4229 || (temp != target && (side_effects_p (temp)
4230 || side_effects_p (target))))
4231 && TREE_CODE (exp) != ERROR_MARK
4232 && ! dont_store_target
4233 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4234 but TARGET is not a valid memory reference, TEMP will differ
4235 from TARGET although it is really the same location. */
4236 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4237 /* If there's nothing to copy, don't bother. Don't call expr_size
4238 unless necessary, because some front ends' (e.g. C++) expr_size hook
4239 aborts on objects that are not supposed to be bit-copied or
4240 bit-initialized. */
4241 && expr_size (exp) != const0_rtx)
4243 target = protect_from_queue (target, 1);
4244 if (GET_MODE (temp) != GET_MODE (target)
4245 && GET_MODE (temp) != VOIDmode)
4247 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4248 if (dont_return_target)
4250 /* In this case, we will return TEMP,
4251 so make sure it has the proper mode.
4252 But don't forget to store the value into TARGET. */
4253 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4254 emit_move_insn (target, temp);
4257 convert_move (target, temp, unsignedp);
4260 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4262 /* Handle copying a string constant into an array. The string
4263 constant may be shorter than the array. So copy just the string's
4264 actual length, and clear the rest. First get the size of the data
4265 type of the string, which is actually the size of the target. */
4266 rtx size = expr_size (exp);
4268 if (GET_CODE (size) == CONST_INT
4269 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4270 emit_block_move (target, temp, size,
4272 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4275 /* Compute the size of the data to copy from the string. */
4277 = size_binop (MIN_EXPR,
4278 make_tree (sizetype, size),
4279 size_int (TREE_STRING_LENGTH (exp)));
4281 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4283 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4286 /* Copy that much. */
4287 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4288 TREE_UNSIGNED (sizetype));
4289 emit_block_move (target, temp, copy_size_rtx,
4291 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4293 /* Figure out how much is left in TARGET that we have to clear.
4294 Do all calculations in ptr_mode. */
4295 if (GET_CODE (copy_size_rtx) == CONST_INT)
4297 size = plus_constant (size, -INTVAL (copy_size_rtx));
4298 target = adjust_address (target, BLKmode,
4299 INTVAL (copy_size_rtx));
4303 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4304 copy_size_rtx, NULL_RTX, 0,
4307 #ifdef POINTERS_EXTEND_UNSIGNED
4308 if (GET_MODE (copy_size_rtx) != Pmode)
4309 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4310 TREE_UNSIGNED (sizetype));
4313 target = offset_address (target, copy_size_rtx,
4314 highest_pow2_factor (copy_size));
4315 label = gen_label_rtx ();
4316 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4317 GET_MODE (size), 0, label);
4320 if (size != const0_rtx)
4321 clear_storage (target, size);
4327 /* Handle calls that return values in multiple non-contiguous locations.
4328 The Irix 6 ABI has examples of this. */
4329 else if (GET_CODE (target) == PARALLEL)
4330 emit_group_load (target, temp, TREE_TYPE (exp),
4331 int_size_in_bytes (TREE_TYPE (exp)));
4332 else if (GET_MODE (temp) == BLKmode)
4333 emit_block_move (target, temp, expr_size (exp),
4335 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4337 emit_move_insn (target, temp);
4340 /* If we don't want a value, return NULL_RTX. */
4341 if ((want_value & 1) == 0)
4344 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4345 ??? The latter test doesn't seem to make sense. */
4346 else if (dont_return_target && GET_CODE (temp) != MEM)
4349 /* Return TARGET itself if it is a hard register. */
4350 else if ((want_value & 1) != 0
4351 && GET_MODE (target) != BLKmode
4352 && ! (GET_CODE (target) == REG
4353 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4354 return copy_to_reg (target);
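/* Illustrative sketch, not part of GCC: WANT_VALUE above is a bit mask,
   not a boolean.  Bit 0 asks store_expr for an rtx holding the stored
   value, and bit 1 marks a store into a call parameter on the stack,
   which routes block moves through BLOCK_OP_CALL_PARM.  A hypothetical
   caller would decode the flags like this.  */

static void
example_decode_want_value (int want_value, int *wants_result,
                           int *is_call_param)
{
  /* Bit 0: the caller wants the stored value returned as an rtx.  */
  *wants_result = (want_value & 1) != 0;

  /* Bit 1: the target is a call parameter on the stack, so block
     moves may need special treatment.  */
  *is_call_param = (want_value & 2) != 0;
}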
4360 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4363 is_zeros_p (tree exp)
4367 switch (TREE_CODE (exp))
4371 case NON_LVALUE_EXPR:
4372 case VIEW_CONVERT_EXPR:
4373 return is_zeros_p (TREE_OPERAND (exp, 0));
4376 return integer_zerop (exp);
4380 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4383 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4386 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4387 elt = TREE_CHAIN (elt))
4388 if (!is_zeros_p (TREE_VALUE (elt)))
4394 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4395 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4396 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4397 if (! is_zeros_p (TREE_VALUE (elt)))
4407 /* Return 1 if EXP contains mostly (3/4) zeros. */
4410 mostly_zeros_p (tree exp)
4412 if (TREE_CODE (exp) == CONSTRUCTOR)
4414 int elts = 0, zeros = 0;
4415 tree elt = CONSTRUCTOR_ELTS (exp);
4416 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4418 /* If there are no ranges of true bits, it is all zero. */
4419 return elt == NULL_TREE;
4421 for (; elt; elt = TREE_CHAIN (elt))
4423 /* We do not handle the case where the index is a RANGE_EXPR,
4424 so the statistic will be somewhat inaccurate.
4425 We do make a more accurate count in store_constructor itself,
4426 and since this function is only used for nested array elements,
4427 this should be close enough. */
4428 if (mostly_zeros_p (TREE_VALUE (elt)))
4433 return 4 * zeros >= 3 * elts;
4436 return is_zeros_p (exp);
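/* Illustrative sketch, not part of GCC: the cross-multiplied test above
   is an integer-only way of asking "are at least 3/4 of the elements
   zero?".  For example, 12 zeros out of 16 elements passes (4*12 = 48
   >= 3*16 = 48) while 11 out of 16 fails (44 < 48).  */

static int
example_mostly_zeros_ratio (int zeros, int elts)
{
  /* Equivalent to zeros / (double) elts >= 0.75, but exact.  */
  return 4 * zeros >= 3 * elts;
}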
4439 /* Helper function for store_constructor.
4440 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4441 TYPE is the type of the CONSTRUCTOR, not the element type.
4442 CLEARED is as for store_constructor.
4443 ALIAS_SET is the alias set to use for any stores.
4445 This provides a recursive shortcut back to store_constructor when it isn't
4446 necessary to go through store_field. This is so that we can pass through
4447 the cleared field to let store_constructor know that we may not have to
4448 clear a substructure if the outer structure has already been cleared. */
4451 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4452 HOST_WIDE_INT bitpos, enum machine_mode mode,
4453 tree exp, tree type, int cleared, int alias_set)
4455 if (TREE_CODE (exp) == CONSTRUCTOR
4456 && bitpos % BITS_PER_UNIT == 0
4457 /* If we have a nonzero bitpos for a register target, then we just
4458 let store_field do the bitfield handling. This is unlikely to
4459 generate unnecessary clear instructions anyway. */
4460 && (bitpos == 0 || GET_CODE (target) == MEM))
4462 if (GET_CODE (target) == MEM)
4464 = adjust_address (target,
4465 GET_MODE (target) == BLKmode
4467 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4468 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4471 /* Update the alias set, if required. */
4472 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4473 && MEM_ALIAS_SET (target) != 0)
4475 target = copy_rtx (target);
4476 set_mem_alias_set (target, alias_set);
4479 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4482 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
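/* Illustrative sketch, not part of GCC: the recursive shortcut above is
   taken only when BITPOS is a whole number of bytes, because
   adjust_address can displace a MEM by bytes but not by bits.  In plain
   C, with a hypothetical 8-bit storage unit:  */

static long
example_bitpos_to_bytes (long bitpos)
{
  /* A byte-aligned bit position becomes a byte offset; anything else
     must take the bit-field path (signalled here by -1).  */
  return bitpos % 8 == 0 ? bitpos / 8 : -1;
}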
4486 /* Store the value of constructor EXP into the rtx TARGET.
4487 TARGET is either a REG or a MEM; we know it cannot conflict, since
4488 safe_from_p has been called.
4489 CLEARED is true if TARGET is known to have been zero'd.
4490 SIZE is the number of bytes of TARGET we are allowed to modify: this
4491 may not be the same as the size of EXP if we are assigning to a field
4492 which has been packed to exclude padding bits. */
4495 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4497 tree type = TREE_TYPE (exp);
4498 #ifdef WORD_REGISTER_OPERATIONS
4499 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4502 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4503 || TREE_CODE (type) == QUAL_UNION_TYPE)
4507 /* If size is zero or the target is already cleared, do nothing. */
4508 if (size == 0 || cleared)
4510 /* We either clear the aggregate or indicate the value is dead. */
4511 else if ((TREE_CODE (type) == UNION_TYPE
4512 || TREE_CODE (type) == QUAL_UNION_TYPE)
4513 && ! CONSTRUCTOR_ELTS (exp))
4514 /* If the constructor is empty, clear the union. */
4516 clear_storage (target, expr_size (exp));
4520 /* If we are building a static constructor into a register,
4521 set the initial value as zero so we can fold the value into
4522 a constant. But if more than one register is involved,
4523 this probably loses. */
4524 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4525 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4527 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4531 /* If the constructor has fewer fields than the structure
4532 or if we are initializing the structure to mostly zeros,
4533 clear the whole structure first. Don't do this if TARGET is a
4534 register whose mode size isn't equal to SIZE since clear_storage
4535 can't handle this case. */
4536 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4537 || mostly_zeros_p (exp))
4538 && (GET_CODE (target) != REG
4539 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4542 rtx xtarget = target;
4544 if (readonly_fields_p (type))
4546 xtarget = copy_rtx (xtarget);
4547 RTX_UNCHANGING_P (xtarget) = 1;
4550 clear_storage (xtarget, GEN_INT (size));
4555 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4557 /* Store each element of the constructor into
4558 the corresponding field of TARGET. */
4560 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4562 tree field = TREE_PURPOSE (elt);
4563 tree value = TREE_VALUE (elt);
4564 enum machine_mode mode;
4565 HOST_WIDE_INT bitsize;
4566 HOST_WIDE_INT bitpos = 0;
4568 rtx to_rtx = target;
4570 /* Just ignore missing fields.
4571 We cleared the whole structure, above,
4572 if any fields are missing. */
4576 if (cleared && is_zeros_p (value))
4579 if (host_integerp (DECL_SIZE (field), 1))
4580 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4584 mode = DECL_MODE (field);
4585 if (DECL_BIT_FIELD (field))
4588 offset = DECL_FIELD_OFFSET (field);
4589 if (host_integerp (offset, 0)
4590 && host_integerp (bit_position (field), 0))
4592 bitpos = int_bit_position (field);
4596 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4602 if (CONTAINS_PLACEHOLDER_P (offset))
4603 offset = build (WITH_RECORD_EXPR, sizetype,
4604 offset, make_tree (TREE_TYPE (exp), target));
4606 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4607 if (GET_CODE (to_rtx) != MEM)
4610 #ifdef POINTERS_EXTEND_UNSIGNED
4611 if (GET_MODE (offset_rtx) != Pmode)
4612 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4614 if (GET_MODE (offset_rtx) != ptr_mode)
4615 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4618 to_rtx = offset_address (to_rtx, offset_rtx,
4619 highest_pow2_factor (offset));
4622 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4623 on the MEM might lead to scheduling the clearing after the
4624 store. */
4625 if (TREE_READONLY (field) && !cleared)
4627 if (GET_CODE (to_rtx) == MEM)
4628 to_rtx = copy_rtx (to_rtx);
4630 RTX_UNCHANGING_P (to_rtx) = 1;
4633 #ifdef WORD_REGISTER_OPERATIONS
4634 /* If this initializes a field that is smaller than a word, at the
4635 start of a word, try to widen it to a full word.
4636 This special case allows us to output C++ member function
4637 initializations in a form that the optimizers can understand. */
4638 if (GET_CODE (target) == REG
4639 && bitsize < BITS_PER_WORD
4640 && bitpos % BITS_PER_WORD == 0
4641 && GET_MODE_CLASS (mode) == MODE_INT
4642 && TREE_CODE (value) == INTEGER_CST
4644 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4646 tree type = TREE_TYPE (value);
4648 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4650 type = (*lang_hooks.types.type_for_size)
4651 (BITS_PER_WORD, TREE_UNSIGNED (type));
4652 value = convert (type, value);
4655 if (BYTES_BIG_ENDIAN)
4657 = fold (build (LSHIFT_EXPR, type, value,
4658 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4659 bitsize = BITS_PER_WORD;
4664 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4665 && DECL_NONADDRESSABLE_P (field))
4667 to_rtx = copy_rtx (to_rtx);
4668 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4671 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4672 value, type, cleared,
4673 get_alias_set (TREE_TYPE (field)));
4676 else if (TREE_CODE (type) == ARRAY_TYPE
4677 || TREE_CODE (type) == VECTOR_TYPE)
4682 tree domain = TYPE_DOMAIN (type);
4683 tree elttype = TREE_TYPE (type);
4685 HOST_WIDE_INT minelt = 0;
4686 HOST_WIDE_INT maxelt = 0;
4690 unsigned n_elts = 0;
4692 /* Vectors are like arrays, but the domain is stored via an array
4693 type indirectly. */
4694 if (TREE_CODE (type) == VECTOR_TYPE)
4696 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4697 the same field as TYPE_DOMAIN, we are not guaranteed that
4698 it always will. */
4699 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4700 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4701 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4703 enum machine_mode mode = GET_MODE (target);
4705 icode = (int) vec_init_optab->handlers[mode].insn_code;
4706 if (icode != CODE_FOR_nothing)
4710 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4711 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4712 vector = alloca (n_elts);
4713 for (i = 0; i < n_elts; i++)
4714 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4719 const_bounds_p = (TYPE_MIN_VALUE (domain)
4720 && TYPE_MAX_VALUE (domain)
4721 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4722 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4724 /* If we have constant bounds for the range of the type, get them. */
4727 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4728 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4731 /* If the constructor has fewer elements than the array,
4732 clear the whole array first. Similarly if this is a
4733 static constructor of a non-BLKmode object. */
4734 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4738 HOST_WIDE_INT count = 0, zero_count = 0;
4739 need_to_clear = ! const_bounds_p;
4741 /* This loop is a more accurate version of the loop in
4742 mostly_zeros_p (it handles RANGE_EXPR in an index).
4743 It is also needed to check for missing elements. */
4744 for (elt = CONSTRUCTOR_ELTS (exp);
4745 elt != NULL_TREE && ! need_to_clear;
4746 elt = TREE_CHAIN (elt))
4748 tree index = TREE_PURPOSE (elt);
4749 HOST_WIDE_INT this_node_count;
4751 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4753 tree lo_index = TREE_OPERAND (index, 0);
4754 tree hi_index = TREE_OPERAND (index, 1);
4756 if (! host_integerp (lo_index, 1)
4757 || ! host_integerp (hi_index, 1))
4763 this_node_count = (tree_low_cst (hi_index, 1)
4764 - tree_low_cst (lo_index, 1) + 1);
4767 this_node_count = 1;
4769 count += this_node_count;
4770 if (mostly_zeros_p (TREE_VALUE (elt)))
4771 zero_count += this_node_count;
4774 /* Clear the entire array first if there are any missing elements,
4775 or if the incidence of zero elements is >= 75%. */
4777 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4781 if (need_to_clear && size > 0 && !vector)
4786 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4788 clear_storage (target, GEN_INT (size));
4792 else if (REG_P (target))
4793 /* Inform later passes that the old value is dead. */
4794 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4796 /* Store each element of the constructor into
4797 the corresponding element of TARGET, determined
4798 by counting the elements. */
4799 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4801 elt = TREE_CHAIN (elt), i++)
4803 enum machine_mode mode;
4804 HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
4807 tree value = TREE_VALUE (elt);
4808 tree index = TREE_PURPOSE (elt);
4809 rtx xtarget = target;
4811 if (cleared && is_zeros_p (value))
4814 unsignedp = TREE_UNSIGNED (elttype);
4815 mode = TYPE_MODE (elttype);
4816 if (mode == BLKmode)
4817 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4818 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4821 bitsize = GET_MODE_BITSIZE (mode);
4823 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4825 tree lo_index = TREE_OPERAND (index, 0);
4826 tree hi_index = TREE_OPERAND (index, 1);
4827 rtx index_r, pos_rtx, loop_end;
4828 struct nesting *loop;
4829 HOST_WIDE_INT lo, hi, count;
4835 /* If the range is constant and "small", unroll the loop. */
4837 && host_integerp (lo_index, 0)
4838 && host_integerp (hi_index, 0)
4839 && (lo = tree_low_cst (lo_index, 0),
4840 hi = tree_low_cst (hi_index, 0),
4841 count = hi - lo + 1,
4842 (GET_CODE (target) != MEM
4844 || (host_integerp (TYPE_SIZE (elttype), 1)
4845 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4848 lo -= minelt; hi -= minelt;
4849 for (; lo <= hi; lo++)
4851 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4853 if (GET_CODE (target) == MEM
4854 && !MEM_KEEP_ALIAS_SET_P (target)
4855 && TREE_CODE (type) == ARRAY_TYPE
4856 && TYPE_NONALIASED_COMPONENT (type))
4858 target = copy_rtx (target);
4859 MEM_KEEP_ALIAS_SET_P (target) = 1;
4862 store_constructor_field
4863 (target, bitsize, bitpos, mode, value, type, cleared,
4864 get_alias_set (elttype));
4869 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4870 loop_end = gen_label_rtx ();
4872 unsignedp = TREE_UNSIGNED (domain);
4874 index = build_decl (VAR_DECL, NULL_TREE, domain);
4877 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4879 SET_DECL_RTL (index, index_r);
4880 if (TREE_CODE (value) == SAVE_EXPR
4881 && SAVE_EXPR_RTL (value) == 0)
4883 /* Make sure value gets expanded once before the
4884 loop. */
4885 expand_expr (value, const0_rtx, VOIDmode, 0);
4888 store_expr (lo_index, index_r, 0);
4889 loop = expand_start_loop (0);
4891 /* Assign value to element index. */
4893 = convert (ssizetype,
4894 fold (build (MINUS_EXPR, TREE_TYPE (index),
4895 index, TYPE_MIN_VALUE (domain))));
4896 position = size_binop (MULT_EXPR, position,
4898 TYPE_SIZE_UNIT (elttype)));
4900 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4901 xtarget = offset_address (target, pos_rtx,
4902 highest_pow2_factor (position));
4903 xtarget = adjust_address (xtarget, mode, 0);
4904 if (TREE_CODE (value) == CONSTRUCTOR)
4905 store_constructor (value, xtarget, cleared,
4906 bitsize / BITS_PER_UNIT);
4908 store_expr (value, xtarget, 0);
4910 expand_exit_loop_if_false (loop,
4911 build (LT_EXPR, integer_type_node,
4914 expand_increment (build (PREINCREMENT_EXPR,
4916 index, integer_one_node), 0, 0);
4918 emit_label (loop_end);
4921 else if ((index != 0 && ! host_integerp (index, 0))
4922 || ! host_integerp (TYPE_SIZE (elttype), 1))
4930 index = ssize_int (1);
4933 index = convert (ssizetype,
4934 fold (build (MINUS_EXPR, index,
4935 TYPE_MIN_VALUE (domain))));
4937 position = size_binop (MULT_EXPR, index,
4939 TYPE_SIZE_UNIT (elttype)));
4940 xtarget = offset_address (target,
4941 expand_expr (position, 0, VOIDmode, 0),
4942 highest_pow2_factor (position));
4943 xtarget = adjust_address (xtarget, mode, 0);
4944 store_expr (value, xtarget, 0);
4951 pos = tree_low_cst (index, 0) - minelt;
4954 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4959 bitpos = ((tree_low_cst (index, 0) - minelt)
4960 * tree_low_cst (TYPE_SIZE (elttype), 1));
4962 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4964 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4965 && TREE_CODE (type) == ARRAY_TYPE
4966 && TYPE_NONALIASED_COMPONENT (type))
4968 target = copy_rtx (target);
4969 MEM_KEEP_ALIAS_SET_P (target) = 1;
4971 store_constructor_field (target, bitsize, bitpos, mode, value,
4972 type, cleared, get_alias_set (elttype));
4977 emit_insn (GEN_FCN (icode) (target,
4978 gen_rtx_PARALLEL (GET_MODE (target),
4979 gen_rtvec_v (n_elts, vector))));
4983 /* Set constructor assignments. */
4984 else if (TREE_CODE (type) == SET_TYPE)
4986 tree elt = CONSTRUCTOR_ELTS (exp);
4987 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
4988 tree domain = TYPE_DOMAIN (type);
4989 tree domain_min, domain_max, bitlength;
4991 /* The default implementation strategy is to extract the constant
4992 parts of the constructor, use that to initialize the target,
4993 and then "or" in whatever non-constant ranges we need in addition.
4995 If a large set is all zero or all ones, it is
4996 probably better to set it using memset (if available) or bzero.
4997 Also, if a large set has just a single range, it may also be
4998 better to first clear the whole set (using
4999 bzero/memset), and then set the bits we want. */
5001 /* Check for all zeros. */
5002 if (elt == NULL_TREE && size > 0)
5005 clear_storage (target, GEN_INT (size));
5009 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5010 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5011 bitlength = size_binop (PLUS_EXPR,
5012 size_diffop (domain_max, domain_min),
5015 nbits = tree_low_cst (bitlength, 1);
5017 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5018 are "complicated" (more than one range), initialize (the
5019 constant parts) by copying from a constant. */
5020 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5021 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5023 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5024 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5025 char *bit_buffer = alloca (nbits);
5026 HOST_WIDE_INT word = 0;
5027 unsigned int bit_pos = 0;
5028 unsigned int ibit = 0;
5029 unsigned int offset = 0; /* In bytes from beginning of set. */
5031 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5034 if (bit_buffer[ibit])
5036 if (BYTES_BIG_ENDIAN)
5037 word |= (1 << (set_word_size - 1 - bit_pos));
5039 word |= 1 << bit_pos;
5043 if (bit_pos >= set_word_size || ibit == nbits)
5045 if (word != 0 || ! cleared)
5047 rtx datum = GEN_INT (word);
5050 /* The assumption here is that it is safe to use
5051 XEXP if the set is multi-word, but not if
5052 it's single-word. */
5053 if (GET_CODE (target) == MEM)
5054 to_rtx = adjust_address (target, mode, offset);
5055 else if (offset == 0)
5059 emit_move_insn (to_rtx, datum);
5066 offset += set_word_size / BITS_PER_UNIT;
5071 /* Don't bother clearing storage if the set is all ones. */
5072 if (TREE_CHAIN (elt) != NULL_TREE
5073 || (TREE_PURPOSE (elt) == NULL_TREE
5075 : ( ! host_integerp (TREE_VALUE (elt), 0)
5076 || ! host_integerp (TREE_PURPOSE (elt), 0)
5077 || (tree_low_cst (TREE_VALUE (elt), 0)
5078 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5079 != (HOST_WIDE_INT) nbits))))
5080 clear_storage (target, expr_size (exp));
5082 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5084 /* Start of range of element or NULL. */
5085 tree startbit = TREE_PURPOSE (elt);
5086 /* End of range of element, or element value. */
5087 tree endbit = TREE_VALUE (elt);
5088 HOST_WIDE_INT startb, endb;
5089 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5091 bitlength_rtx = expand_expr (bitlength,
5092 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5094 /* Handle non-range tuple element like [ expr ]. */
5095 if (startbit == NULL_TREE)
5097 startbit = save_expr (endbit);
5101 startbit = convert (sizetype, startbit);
5102 endbit = convert (sizetype, endbit);
5103 if (! integer_zerop (domain_min))
5105 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5106 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5108 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5109 EXPAND_CONST_ADDRESS);
5110 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5111 EXPAND_CONST_ADDRESS);
5117 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5118 (GET_MODE (target), 0),
5121 emit_move_insn (targetx, target);
5124 else if (GET_CODE (target) == MEM)
5129 /* Optimization: If startbit and endbit are constants divisible
5130 by BITS_PER_UNIT, call memset instead. */
5131 if (TARGET_MEM_FUNCTIONS
5132 && TREE_CODE (startbit) == INTEGER_CST
5133 && TREE_CODE (endbit) == INTEGER_CST
5134 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5135 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5137 emit_library_call (memset_libfunc, LCT_NORMAL,
5139 plus_constant (XEXP (targetx, 0),
5140 startb / BITS_PER_UNIT),
5142 constm1_rtx, TYPE_MODE (integer_type_node),
5143 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5144 TYPE_MODE (sizetype));
5147 emit_library_call (setbits_libfunc, LCT_NORMAL,
5148 VOIDmode, 4, XEXP (targetx, 0),
5149 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5150 startbit_rtx, TYPE_MODE (sizetype),
5151 endbit_rtx, TYPE_MODE (sizetype));
5154 emit_move_insn (target, targetx);
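/* Illustrative sketch, not part of GCC: the word-building loop above
   packs the constant part of a SET_TYPE constructor one bit at a time.
   Bit I of the set lands at shift I on a little-endian target and at
   shift SET_WORD_SIZE - 1 - I on a big-endian one.  With a hypothetical
   32-bit set word:  */

static unsigned long
example_pack_set_word (const char *bit_buffer, int nbits, int big_endian)
{
  unsigned long word = 0;
  int i;

  for (i = 0; i < nbits && i < 32; i++)
    if (bit_buffer[i])
      word |= 1UL << (big_endian ? 31 - i : i);
  return word;
}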
5162 /* Store the value of EXP (an expression tree)
5163 into a subfield of TARGET which has mode MODE and occupies
5164 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5165 If MODE is VOIDmode, it means that we are storing into a bit-field.
5167 If VALUE_MODE is VOIDmode, return nothing in particular.
5168 UNSIGNEDP is not used in this case.
5170 Otherwise, return an rtx for the value stored. This rtx
5171 has mode VALUE_MODE if that is convenient to do.
5172 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5174 TYPE is the type of the underlying object,
5176 ALIAS_SET is the alias set for the destination. This value will
5177 (in general) be different from that for TARGET, since TARGET is a
5178 reference to the containing structure. */
5181 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5182 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5183 int unsignedp, tree type, int alias_set)
5185 HOST_WIDE_INT width_mask = 0;
5187 if (TREE_CODE (exp) == ERROR_MARK)
5190 /* If we have nothing to store, do nothing unless the expression has
5191 side-effects. */
5192 if (bitsize == 0)
5193 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5194 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5195 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5197 /* If we are storing into an unaligned field of an aligned union that is
5198 in a register, we may have the mode of TARGET being an integer mode but
5199 MODE == BLKmode. In that case, get an aligned object whose size and
5200 alignment are the same as TARGET and store TARGET into it (we can avoid
5201 the store if the field being stored is the entire width of TARGET). Then
5202 call ourselves recursively to store the field into a BLKmode version of
5203 that object. Finally, load from the object into TARGET. This is not
5204 very efficient in general, but should only be slightly more expensive
5205 than the otherwise-required unaligned accesses. Perhaps this can be
5206 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5207 twice, once with emit_move_insn and once via store_field. */
5210 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5212 rtx object = assign_temp (type, 0, 1, 1);
5213 rtx blk_object = adjust_address (object, BLKmode, 0);
5215 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5216 emit_move_insn (object, target);
5218 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5221 emit_move_insn (target, object);
5223 /* We want to return the BLKmode version of the data. */
5227 if (GET_CODE (target) == CONCAT)
5229 /* We're storing into a struct containing a single __complex. */
5233 return store_expr (exp, target, 0);
5236 /* If the structure is in a register or if the component
5237 is a bit field, we cannot use addressing to access it.
5238 Use bit-field techniques or SUBREG to store in it. */
5240 if (mode == VOIDmode
5241 || (mode != BLKmode && ! direct_store[(int) mode]
5242 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5243 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5244 || GET_CODE (target) == REG
5245 || GET_CODE (target) == SUBREG
5246 /* If the field isn't aligned enough to store as an ordinary memref,
5247 store it as a bit field. */
5249 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5250 || bitpos % GET_MODE_ALIGNMENT (mode))
5251 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5252 || (bitpos % BITS_PER_UNIT != 0)))
5253 /* If the RHS and field are a constant size and the size of the
5254 RHS isn't the same size as the bitfield, we must use bitfield
5255 operations. */
5256 || (bitsize >= 0
5257 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5258 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5260 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5262 /* If BITSIZE is narrower than the size of the type of EXP
5263 we will be narrowing TEMP. Normally, what's wanted are the
5264 low-order bits. However, if EXP's type is a record and this is a
5265 big-endian machine, we want the upper BITSIZE bits. */
5266 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5267 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5268 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5269 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5270 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5274 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5275 MODE. */
5276 if (mode != VOIDmode && mode != BLKmode
5277 && mode != TYPE_MODE (TREE_TYPE (exp)))
5278 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5280 /* If the modes of TARGET and TEMP are both BLKmode, both
5281 must be in memory and BITPOS must be aligned on a byte
5282 boundary. If so, we simply do a block copy. */
5283 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5285 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5286 || bitpos % BITS_PER_UNIT != 0)
5289 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5290 emit_block_move (target, temp,
5291 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5295 return value_mode == VOIDmode ? const0_rtx : target;
5298 /* Store the value in the bitfield. */
5299 store_bit_field (target, bitsize, bitpos, mode, temp,
5300 int_size_in_bytes (type));
5302 if (value_mode != VOIDmode)
5304 /* The caller wants an rtx for the value.
5305 If possible, avoid refetching from the bitfield itself. */
5307 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5310 enum machine_mode tmode;
5312 tmode = GET_MODE (temp);
5313 if (tmode == VOIDmode)
5317 return expand_and (tmode, temp,
5318 gen_int_mode (width_mask, tmode),
5321 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5322 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5323 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5326 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5327 NULL_RTX, value_mode, VOIDmode,
5328 int_size_in_bytes (type));
5334 rtx addr = XEXP (target, 0);
5335 rtx to_rtx = target;
5337 /* If a value is wanted, it must be the lhs;
5338 so make the address stable for multiple use. */
5340 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5341 && ! CONSTANT_ADDRESS_P (addr)
5342 /* A frame-pointer reference is already stable. */
5343 && ! (GET_CODE (addr) == PLUS
5344 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5345 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5346 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5347 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5349 /* Now build a reference to just the desired component. */
5351 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5353 if (to_rtx == target)
5354 to_rtx = copy_rtx (to_rtx);
5356 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5357 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5358 set_mem_alias_set (to_rtx, alias_set);
5360 return store_expr (exp, to_rtx, value_mode != VOIDmode);
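/* Illustrative sketch, not part of GCC: when the caller wants back the
   value just stored into a BITSIZE-bit field, the code above fixes up
   TEMP instead of re-reading the bit-field: unsigned values are masked
   with WIDTH_MASK, signed values are shifted up and back down to
   sign-extend.  A plain-C analogue, assuming a 32-bit int and
   0 < bitsize < 32:  */

static int
example_stored_field_value (int temp, int bitsize, int unsignedp)
{
  int width_mask = (1 << bitsize) - 1;

  if (unsignedp)
    /* Keep only the low BITSIZE bits.  */
    return temp & width_mask;

  /* Move the field's sign bit up to the word's sign bit, then back.
     (For negative values this is implementation-defined in strict C;
     the rtl expansion above uses explicit shift operations.)  */
  return (temp << (32 - bitsize)) >> (32 - bitsize);
}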
5364 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5365 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5366 codes and find the ultimate containing object, which we return.
5368 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5369 bit position, and *PUNSIGNEDP to the signedness of the field.
5370 If the position of the field is variable, we store a tree
5371 giving the variable offset (in units) in *POFFSET.
5372 This offset is in addition to the bit position.
5373 If the position is not variable, we store 0 in *POFFSET.
5375 If any of the extraction expressions is volatile,
5376 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5378 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5379 is a mode that can be used to access the field. In that case, *PBITSIZE
5380 is redundant.
5382 If the field describes a variable-sized object, *PMODE is set to
5383 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5384 this case, but the address of the object can be found. */
5387 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5388 HOST_WIDE_INT *pbitpos, tree *poffset,
5389 enum machine_mode *pmode, int *punsignedp,
5393 enum machine_mode mode = VOIDmode;
5394 tree offset = size_zero_node;
5395 tree bit_offset = bitsize_zero_node;
5396 tree placeholder_ptr = 0;
5399 /* First get the mode, signedness, and size. We do this from just the
5400 outermost expression. */
5401 if (TREE_CODE (exp) == COMPONENT_REF)
5403 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5404 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5405 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5407 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5409 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5411 size_tree = TREE_OPERAND (exp, 1);
5412 *punsignedp = TREE_UNSIGNED (exp);
5416 mode = TYPE_MODE (TREE_TYPE (exp));
5417 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5419 if (mode == BLKmode)
5420 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5422 *pbitsize = GET_MODE_BITSIZE (mode);
5427 if (! host_integerp (size_tree, 1))
5428 mode = BLKmode, *pbitsize = -1;
5430 *pbitsize = tree_low_cst (size_tree, 1);
5433 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5434 and find the ultimate containing object. */
5437 if (TREE_CODE (exp) == BIT_FIELD_REF)
5438 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5439 else if (TREE_CODE (exp) == COMPONENT_REF)
5441 tree field = TREE_OPERAND (exp, 1);
5442 tree this_offset = DECL_FIELD_OFFSET (field);
5444 /* If this field hasn't been filled in yet, don't go
5445 past it. This should only happen when folding expressions
5446 made during type construction. */
5447 if (this_offset == 0)
5449 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5450 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5452 offset = size_binop (PLUS_EXPR, offset, this_offset);
5453 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5454 DECL_FIELD_BIT_OFFSET (field));
5456 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5459 else if (TREE_CODE (exp) == ARRAY_REF
5460 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5462 tree index = TREE_OPERAND (exp, 1);
5463 tree array = TREE_OPERAND (exp, 0);
5464 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5465 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5466 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5468 /* We assume all arrays have sizes that are a multiple of a byte.
5469 First subtract the lower bound, if any, in the type of the
5470 index, then convert to sizetype and multiply by the size of the
5471 array element. */
5472 if (low_bound != 0 && ! integer_zerop (low_bound))
5473 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5476 /* If the index has a self-referential type, pass it to a
5477 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5478 component to one. */
5479 if (CONTAINS_PLACEHOLDER_P (index))
5480 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5481 if (CONTAINS_PLACEHOLDER_P (unit_size))
5482 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5484 offset = size_binop (PLUS_EXPR, offset,
5485 size_binop (MULT_EXPR,
5486 convert (sizetype, index),
5490 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5492 tree new = find_placeholder (exp, &placeholder_ptr);
5494 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5495 We might have been called from tree optimization where we
5496 haven't set up an object yet. */
5505 /* We can go inside most conversions: all NON_LVALUE_EXPRs, all normal
5506 conversions that don't change the mode, and all view conversions
5507 except those that need to "step up" the alignment. */
5508 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5509 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5510 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5511 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5513 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5514 < BIGGEST_ALIGNMENT)
5515 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5516 || TYPE_ALIGN_OK (TREE_TYPE
5517 (TREE_OPERAND (exp, 0))))))
5518 && ! ((TREE_CODE (exp) == NOP_EXPR
5519 || TREE_CODE (exp) == CONVERT_EXPR)
5520 && (TYPE_MODE (TREE_TYPE (exp))
5521 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5524 /* If any reference in the chain is volatile, the effect is volatile. */
5525 if (TREE_THIS_VOLATILE (exp))
5528 exp = TREE_OPERAND (exp, 0);
5531 /* If OFFSET is constant, see if we can return the whole thing as a
5532 constant bit position. Otherwise, split it up. */
5533 if (host_integerp (offset, 0)
5534 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5536 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5537 && host_integerp (tem, 0))
5538 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5540 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
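/* Illustrative sketch, not part of GCC: the final split above reduces
   to the following.  A compile-time-constant byte offset is folded into
   the bit position and *POFFSET becomes 0; otherwise only the
   accumulated bit offset goes into *PBITPOS and the variable byte part
   is reported separately.  With hypothetical plain integers and 8-bit
   units:  */

static void
example_split_position (long offset_bytes, int offset_is_constant,
                        long bit_offset, long *pbitpos, long *poffset)
{
  if (offset_is_constant)
    {
      *pbitpos = offset_bytes * 8 + bit_offset;
      *poffset = 0;
    }
  else
    {
      *pbitpos = bit_offset;
      *poffset = offset_bytes;
    }
}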
5546 /* Return 1 if T is an expression that get_inner_reference handles. */
5549 handled_component_p (tree t)
5551 switch (TREE_CODE (t))
5556 case ARRAY_RANGE_REF:
5557 case NON_LVALUE_EXPR:
5558 case VIEW_CONVERT_EXPR:
5561 /* ??? Sure they are handled, but get_inner_reference may return
5562 a different PBITSIZE, depending upon whether the expression is
5563 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5566 return (TYPE_MODE (TREE_TYPE (t))
5567 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5574 /* Given an rtx VALUE that may contain additions and multiplications, return
5575 an equivalent value that just refers to a register, memory, or constant.
5576 This is done by generating instructions to perform the arithmetic and
5577 returning a pseudo-register containing the value.
5579 The returned value may be a REG, SUBREG, MEM or constant. */
5582 force_operand (rtx value, rtx target)
5585 /* Use subtarget as the target for operand 0 of a binary operation. */
5586 rtx subtarget = get_subtarget (target);
5587 enum rtx_code code = GET_CODE (value);
5589 /* Check for a PIC address load. */
5590 if ((code == PLUS || code == MINUS)
5591 && XEXP (value, 0) == pic_offset_table_rtx
5592 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5593 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5594 || GET_CODE (XEXP (value, 1)) == CONST))
5597 subtarget = gen_reg_rtx (GET_MODE (value));
5598 emit_move_insn (subtarget, value);
5602 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5605 target = gen_reg_rtx (GET_MODE (value));
5606 convert_move (target, force_operand (XEXP (value, 0), NULL),
5607 code == ZERO_EXTEND);
5611 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5613 op2 = XEXP (value, 1);
5614 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5616 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5619 op2 = negate_rtx (GET_MODE (value), op2);
5622 /* Check for an addition with OP2 a constant integer and our first
5623 operand a PLUS of a virtual register and something else. In that
5624 case, we want to emit the sum of the virtual register and the
5625 constant first and then add the other value. This allows virtual
5626 register instantiation to simply modify the constant rather than
5627 creating another one around this addition. */
5628 if (code == PLUS && GET_CODE (op2) == CONST_INT
5629 && GET_CODE (XEXP (value, 0)) == PLUS
5630 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5631 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5632 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5634 rtx temp = expand_simple_binop (GET_MODE (value), code,
5635 XEXP (XEXP (value, 0), 0), op2,
5636 subtarget, 0, OPTAB_LIB_WIDEN);
5637 return expand_simple_binop (GET_MODE (value), code, temp,
5638 force_operand (XEXP (XEXP (value,
5640 target, 0, OPTAB_LIB_WIDEN);
5643 op1 = force_operand (XEXP (value, 0), subtarget);
5644 op2 = force_operand (op2, NULL_RTX);
5648 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5650 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5651 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5652 target, 1, OPTAB_LIB_WIDEN);
5654 return expand_divmod (0,
5655 FLOAT_MODE_P (GET_MODE (value))
5656 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5657 GET_MODE (value), op1, op2, target, 0);
5660 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5664 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5668 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5672 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5673 target, 0, OPTAB_LIB_WIDEN);
5676 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5677 target, 1, OPTAB_LIB_WIDEN);
5680 if (GET_RTX_CLASS (code) == '1')
5682 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5683 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5686 #ifdef INSN_SCHEDULING
5687 /* On machines that have insn scheduling, we want all memory references to be
5688 explicit, so we need to deal with such paradoxical SUBREGs. */
5689 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5690 && (GET_MODE_SIZE (GET_MODE (value))
5691 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5693 = simplify_gen_subreg (GET_MODE (value),
5694 force_reg (GET_MODE (SUBREG_REG (value)),
5695 force_operand (SUBREG_REG (value),
5697 GET_MODE (SUBREG_REG (value)),
5698 SUBREG_BYTE (value));
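/* Illustrative sketch, not part of GCC: the MINUS case above is
   canonicalized into a PLUS of the negated constant (the negate_rtx
   call), so only one addition shape needs further handling.  In
   two's-complement terms this is simply:  */

static unsigned long
example_minus_to_plus (unsigned long op1, unsigned long c)
{
  /* op1 - c == op1 + (-c), with wraparound.  */
  return op1 + (0UL - c);
}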
5704 /* Subroutine of expand_expr: return nonzero iff there is no way that
5705 EXP can reference X, which is being modified. TOP_P is nonzero if this
5706 call is going to be used to determine whether we need a temporary
5707 for EXP, as opposed to a recursive call to this function.
5709 It is always safe for this routine to return zero since it merely
5710 searches for optimization opportunities. */
5713 safe_from_p (rtx x, tree exp, int top_p)
5717 static tree save_expr_list;
5720 /* If EXP has varying size, we MUST use a target since we currently
5721 have no way of allocating temporaries of variable size
5722 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5723 So we assume here that something at a higher level has prevented a
5724 clash. This is somewhat bogus, but the best we can do. Only
5725 do this when X is BLKmode and when we are at the top level. */
5726 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5727 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5728 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5729 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5730 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5732 && GET_MODE (x) == BLKmode)
5733 /* If X is in the outgoing argument area, it is always safe. */
5734 || (GET_CODE (x) == MEM
5735 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5736 || (GET_CODE (XEXP (x, 0)) == PLUS
5737 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5740 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5741 find the underlying pseudo. */
5742 if (GET_CODE (x) == SUBREG)
5745 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5749 /* A SAVE_EXPR might appear many times in the expression passed to the
5750 top-level safe_from_p call, and if it has a complex subexpression,
5751 examining it multiple times could result in a combinatorial explosion.
5752 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5753 with optimization took about 28 minutes to compile -- even though it was
5754 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5755 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5756 we have processed. Note that the only test of top_p was above. */
5765 rtn = safe_from_p (x, exp, 0);
5767 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5768 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5773 /* Now look at our tree code and possibly recurse. */
5774 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5777 exp_rtl = DECL_RTL_IF_SET (exp);
5784 if (TREE_CODE (exp) == TREE_LIST)
5788 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5790 exp = TREE_CHAIN (exp);
5793 if (TREE_CODE (exp) != TREE_LIST)
5794 return safe_from_p (x, exp, 0);
5797 else if (TREE_CODE (exp) == ERROR_MARK)
5798 return 1; /* An already-visited SAVE_EXPR? */
5804 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5809 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5813 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5814 the expression. If it is set, we conflict iff we are that rtx or
5815 both are in memory. Otherwise, we check all operands of the
5816 expression recursively. */
5818 switch (TREE_CODE (exp))
5821 /* If the operand is static or we are static, we can't conflict.
5822 Likewise if we don't conflict with the operand at all. */
5823 if (staticp (TREE_OPERAND (exp, 0))
5824 || TREE_STATIC (exp)
5825 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5828 /* Otherwise, the only way this can conflict is if we are taking
5829 the address of a DECL whose address is part of X, which is
5830 very rare. */
5831 exp = TREE_OPERAND (exp, 0);
5834 if (!DECL_RTL_SET_P (exp)
5835 || GET_CODE (DECL_RTL (exp)) != MEM)
5838 exp_rtl = XEXP (DECL_RTL (exp), 0);
5843 if (GET_CODE (x) == MEM
5844 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5845 get_alias_set (exp)))
5850 /* Assume that the call will clobber all hard registers and
5851 all of memory. */
5852 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5853 || GET_CODE (x) == MEM)
5858 /* If a sequence exists, we would have to scan every instruction
5859 in the sequence to see if it was safe. This is probably not
5860 worthwhile. */
5861 if (RTL_EXPR_SEQUENCE (exp))
5864 exp_rtl = RTL_EXPR_RTL (exp);
5867 case WITH_CLEANUP_EXPR:
5868 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5871 case CLEANUP_POINT_EXPR:
5872 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5875 exp_rtl = SAVE_EXPR_RTL (exp);
5879 /* If we've already scanned this, don't do it again. Otherwise,
5880 show we've scanned it and record for clearing the flag if we're
5881 going on. */
5882 if (TREE_PRIVATE (exp))
5885 TREE_PRIVATE (exp) = 1;
5886 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5888 TREE_PRIVATE (exp) = 0;
5892 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5896 /* The only operand we look at is operand 1. The rest aren't
5897 part of the expression. */
5898 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5904 /* If we have an rtx, we do not need to scan our operands. */
5908 nops = first_rtl_op (TREE_CODE (exp));
5909 for (i = 0; i < nops; i++)
5910 if (TREE_OPERAND (exp, i) != 0
5911 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5914 /* If this is a language-specific tree code, it may require
5915 special handling. */
5916 if ((unsigned int) TREE_CODE (exp)
5917 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5918 && !(*lang_hooks.safe_from_p) (x, exp))
5922 /* If we have an rtl, find any enclosed object. Then see if we conflict
5923 with it. */
5926 if (GET_CODE (exp_rtl) == SUBREG)
5928 exp_rtl = SUBREG_REG (exp_rtl);
5929 if (GET_CODE (exp_rtl) == REG
5930 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5934 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5935 are memory and they conflict. */
5936 return ! (rtx_equal_p (x, exp_rtl)
5937 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5938 && true_dependence (exp_rtl, VOIDmode, x,
5939 rtx_addr_varies_p)));
5942 /* If we reach here, it is safe. */
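/* Illustrative sketch, not part of GCC: the TREE_PRIVATE bit used by
   safe_from_p above acts as a visited flag, so a SAVE_EXPR shared by
   many paths is scanned only once.  That is what turns the potentially
   exponential walk described earlier into a linear one.  The same idea
   on a hypothetical DAG:  */

struct example_node
{
  int visited;
  int nkids;
  struct example_node **kids;
};

static int
example_safe_scan (struct example_node *n)
{
  int i;

  if (n->visited)
    return 1;		/* Already examined; treat as safe.  */

  n->visited = 1;
  for (i = 0; i < n->nkids; i++)
    if (! example_safe_scan (n->kids[i]))
      return 0;
  return 1;
}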
5946 /* Subroutine of expand_expr: return rtx if EXP is a
5947 variable or parameter; else return 0. */
5953 switch (TREE_CODE (exp))
5957 return DECL_RTL (exp);
5963 /* Return the highest power of two that EXP is known to be a multiple of.
5964 This is used in updating alignment of MEMs in array references. */
5966 static unsigned HOST_WIDE_INT
5967 highest_pow2_factor (tree exp)
5969 unsigned HOST_WIDE_INT c0, c1;
5971 switch (TREE_CODE (exp))
5974 /* We can find the lowest bit that's a one. If the low
5975 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
5976 We need to handle this case since we can find it in a COND_EXPR,
5977 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
5978 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
5979 later ICE. */
5980 if (TREE_CONSTANT_OVERFLOW (exp))
5981 return BIGGEST_ALIGNMENT;
5984 /* Note: tree_low_cst is intentionally not used here;
5985 we don't care about the upper bits. */
5986 c0 = TREE_INT_CST_LOW (exp);
5988 return c0 ? c0 : BIGGEST_ALIGNMENT;
5992 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
5993 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5994 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
5995 return MIN (c0, c1);
5998 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
5999 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6002 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6004 if (integer_pow2p (TREE_OPERAND (exp, 1))
6005 && host_integerp (TREE_OPERAND (exp, 1), 1))
6007 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6008 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6009 return MAX (1, c0 / c1);
6013 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6014 case SAVE_EXPR: case WITH_RECORD_EXPR:
6015 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6018 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6021 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6022 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6023 return MIN (c0, c1);
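/* Illustrative sketch, not part of GCC: for the INTEGER_CST case above,
   "the lowest bit that's a one" is the largest power of two dividing
   the constant, and the two's-complement identity c0 & -c0 isolates it:
   12 yields 4, 80 yields 16.  Sums then take the MIN of their operands'
   factors, as in the PLUS_EXPR case.  */

static unsigned long
example_lowest_set_bit (unsigned long c0)
{
  /* Negation flips every bit above the lowest set bit, so the AND
     keeps exactly that bit.  A zero input stays zero, which the
     caller maps to BIGGEST_ALIGNMENT.  */
  return c0 & -c0;
}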
6032 /* Similar, except that it is known that the expression must be a multiple
6033 of the alignment of TYPE. */
6035 static unsigned HOST_WIDE_INT
6036 highest_pow2_factor_for_type (tree type, tree exp)
6038 unsigned HOST_WIDE_INT type_align, factor;
6040 factor = highest_pow2_factor (exp);
6041 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6042 return MAX (factor, type_align);
6045 /* Return an object on the placeholder list that matches EXP, a
6046 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6047 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6048 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6049 is a location which initially points to a starting location in the
6050 placeholder list (zero means start of the list) and where a pointer into
6051 the placeholder list at which the object is found is placed. */
6054 find_placeholder (tree exp, tree *plist)
6056 tree type = TREE_TYPE (exp);
6057 tree placeholder_expr;
6059 for (placeholder_expr
6060 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6061 placeholder_expr != 0;
6062 placeholder_expr = TREE_CHAIN (placeholder_expr))
6064 tree need_type = TYPE_MAIN_VARIANT (type);
6067 /* Find the outermost reference that is of the type we want. If none,
6068 see if any object has a type that is a pointer to the type we
6069 want. */
6070 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6071 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6072 || TREE_CODE (elt) == COND_EXPR)
6073 ? TREE_OPERAND (elt, 1)
6074 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6075 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6076 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6077 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6078 ? TREE_OPERAND (elt, 0) : 0))
6079 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6082 *plist = placeholder_expr;
6086 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6088 = ((TREE_CODE (elt) == COMPOUND_EXPR
6089 || TREE_CODE (elt) == COND_EXPR)
6090 ? TREE_OPERAND (elt, 1)
6091 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6092 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6093 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6094 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6095 ? TREE_OPERAND (elt, 0) : 0))
6096 if (POINTER_TYPE_P (TREE_TYPE (elt))
6097 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6101 *plist = placeholder_expr;
6102 return build1 (INDIRECT_REF, need_type, elt);
6109 /* Subroutine of expand_expr. Expand the two operands of a binary
6110 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6111 The value may be stored in TARGET if TARGET is nonzero. The
6112 MODIFIER argument is as documented by expand_expr. */
6115 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6116 enum expand_modifier modifier)
6118 if (! safe_from_p (target, exp1, 1))
6120 if (operand_equal_p (exp0, exp1, 0))
6122 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6123 *op1 = copy_rtx (*op0);
6127 /* If we need to preserve evaluation order, copy exp0 into its own
6128 temporary variable so that it can't be clobbered by exp1. */
6129 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6130 exp0 = save_expr (exp0);
6131 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6132 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6137 /* expand_expr: generate code for computing expression EXP.
6138 An rtx for the computed value is returned. The value is never null.
6139 In the case of a void EXP, const0_rtx is returned.
6141 The value may be stored in TARGET if TARGET is nonzero.
6142 TARGET is just a suggestion; callers must assume that
6143 the rtx returned may not be the same as TARGET.
6145 If TARGET is CONST0_RTX, it means that the value will be ignored.
6147 If TMODE is not VOIDmode, it suggests generating the
6148 result in mode TMODE. But this is done only when convenient.
6149 Otherwise, TMODE is ignored and the value generated in its natural mode.
6150 TMODE is just a suggestion; callers must assume that
6151 the rtx returned may not have mode TMODE.
6153 Note that TARGET may have neither TMODE nor MODE. In that case, it
6154 probably will not be used.
6156 If MODIFIER is EXPAND_SUM then when EXP is an addition
6157 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6158 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6159 products as above, or REG or MEM, or constant.
6160 Ordinarily in such cases we would output mul or add instructions
6161 and then return a pseudo reg containing the sum.
6163 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6164 it also marks a label as absolutely required (it can't be dead).
6165 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6166 This is used for outputting expressions used in initializers.
6168 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6169 with a constant address even if that address is not normally legitimate.
6170 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6172 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6173 a call parameter. Such targets require special care as we haven't yet
6174 marked TARGET so that it's safe from being trashed by libcalls. We
6175 don't want to use TARGET for anything but the final result;
6176 Intermediate values must go elsewhere. Additionally, calls to
6177 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6179 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6180 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6181 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6182 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6186 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6187 enum expand_modifier modifier, rtx *alt_rtl)
6190 tree type = TREE_TYPE (exp);
6191 int unsignedp = TREE_UNSIGNED (type);
6192 enum machine_mode mode;
6193 enum tree_code code = TREE_CODE (exp);
6195 rtx subtarget, original_target;
6199 /* Handle ERROR_MARK before anybody tries to access its type. */
6200 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6202 op0 = CONST0_RTX (tmode);
6208 mode = TYPE_MODE (type);
6209 /* Use subtarget as the target for operand 0 of a binary operation. */
6210 subtarget = get_subtarget (target);
6211 original_target = target;
6212 ignore = (target == const0_rtx
6213 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6214 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6215 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6216 && TREE_CODE (type) == VOID_TYPE));
6218 /* If we are going to ignore this result, we need only do something
6219 if there is a side-effect somewhere in the expression. If there
6220 is, short-circuit the most common cases here. Note that we must
6221 not call expand_expr with anything but const0_rtx in case this
6222 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6226 if (! TREE_SIDE_EFFECTS (exp))
6229 /* Ensure we reference a volatile object even if value is ignored, but
6230 don't do this if all we are doing is taking its address. */
6231 if (TREE_THIS_VOLATILE (exp)
6232 && TREE_CODE (exp) != FUNCTION_DECL
6233 && mode != VOIDmode && mode != BLKmode
6234 && modifier != EXPAND_CONST_ADDRESS)
6236 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6237 if (GET_CODE (temp) == MEM)
6238 temp = copy_to_reg (temp);
6242 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6243 || code == INDIRECT_REF || code == BUFFER_REF)
6244 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6247 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6248 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6250 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6251 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6254 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6255 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6256 /* If the second operand has no side effects, just evaluate
6258 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6260 else if (code == BIT_FIELD_REF)
6262 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6263 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6264 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6271 /* If will do cse, generate all results into pseudo registers
6272 since 1) that allows cse to find more things
6273 and 2) otherwise cse could produce an insn the machine
6274 cannot support. An exception is a CONSTRUCTOR into a multi-word
6275 MEM: that's much more likely to be most efficient into the MEM.
6276 Another is a CALL_EXPR which must return in memory. */
6278 if (! cse_not_expected && mode != BLKmode && target
6279 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6280 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6281 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6288 tree function = decl_function_context (exp);
6289 /* Labels in containing functions, or labels used from initializers,
6291 if (modifier == EXPAND_INITIALIZER
6292 || (function != current_function_decl
6293 && function != inline_function_decl
6295 temp = force_label_rtx (exp);
6297 temp = label_rtx (exp);
6299 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6300 if (function != current_function_decl
6301 && function != inline_function_decl && function != 0)
6302 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6307 if (!DECL_RTL_SET_P (exp))
6309 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6310 return CONST0_RTX (mode);
6313 /* ... fall through ... */
6316 /* If a static var's type was incomplete when the decl was written,
6317 but the type is complete now, lay out the decl now. */
6318 if (DECL_SIZE (exp) == 0
6319 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6320 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6321 layout_decl (exp, 0);
6323 /* ... fall through ... */
6327 if (DECL_RTL (exp) == 0)
6330 /* Ensure variable marked as used even if it doesn't go through
6331 a parser. If it hasn't be used yet, write out an external
6333 if (! TREE_USED (exp))
6335 assemble_external (exp);
6336 TREE_USED (exp) = 1;
6339 /* Show we haven't gotten RTL for this yet. */
6342 /* Handle variables inherited from containing functions. */
6343 context = decl_function_context (exp);
6345 /* We treat inline_function_decl as an alias for the current function
6346 because that is the inline function whose vars, types, etc.
6347 are being merged into the current function.
6348 See expand_inline_function. */
6350 if (context != 0 && context != current_function_decl
6351 && context != inline_function_decl
6352 /* If var is static, we don't need a static chain to access it. */
6353 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6354 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6358 /* Mark as non-local and addressable. */
6359 DECL_NONLOCAL (exp) = 1;
6360 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6362 (*lang_hooks.mark_addressable) (exp);
6363 if (GET_CODE (DECL_RTL (exp)) != MEM)
6365 addr = XEXP (DECL_RTL (exp), 0);
6366 if (GET_CODE (addr) == MEM)
6368 = replace_equiv_address (addr,
6369 fix_lexical_addr (XEXP (addr, 0), exp));
6371 addr = fix_lexical_addr (addr, exp);
6373 temp = replace_equiv_address (DECL_RTL (exp), addr);
6376 /* This is the case of an array whose size is to be determined
6377 from its initializer, while the initializer is still being parsed.
6380 else if (GET_CODE (DECL_RTL (exp)) == MEM
6381 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6382 temp = validize_mem (DECL_RTL (exp));
6384 /* If DECL_RTL is memory, we are in the normal case and either
6385 the address is not valid or it is not a register and -fforce-addr
6386 is specified, get the address into a register. */
6388 else if (GET_CODE (DECL_RTL (exp)) == MEM
6389 && modifier != EXPAND_CONST_ADDRESS
6390 && modifier != EXPAND_SUM
6391 && modifier != EXPAND_INITIALIZER
6392 && (! memory_address_p (DECL_MODE (exp),
6393 XEXP (DECL_RTL (exp), 0))
6395 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6398 *alt_rtl = DECL_RTL (exp);
6399 temp = replace_equiv_address (DECL_RTL (exp),
6400 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6403 /* If we got something, return it. But first, set the alignment
6404 if the address is a register. */
6407 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6408 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6413 /* If the mode of DECL_RTL does not match that of the decl, it
6414 must be a promoted value. We return a SUBREG of the wanted mode,
6415 but mark it so that we know that it was already extended. */
6417 if (GET_CODE (DECL_RTL (exp)) == REG
6418 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6420 /* Get the signedness used for this variable. Ensure we get the
6421 same mode we got when the variable was declared. */
6422 if (GET_MODE (DECL_RTL (exp))
6423 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6424 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6427 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6428 SUBREG_PROMOTED_VAR_P (temp) = 1;
6429 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6433 return DECL_RTL (exp);
6436 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6437 TREE_INT_CST_HIGH (exp), mode);
6439 /* ??? If overflow is set, fold will have done an incomplete job,
6440 which can result in (plus xx (const_int 0)), which can get
6441 simplified by validate_replace_rtx during virtual register
6442 instantiation, which can result in unrecognizable insns.
6443 Avoid this by forcing all overflows into registers. */
6444 if (TREE_CONSTANT_OVERFLOW (exp)
6445 && modifier != EXPAND_INITIALIZER)
6446 temp = force_reg (mode, temp);
6451 return const_vector_from_tree (exp);
6454 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6457 /* If optimized, generate immediate CONST_DOUBLE
6458 which will be turned into memory by reload if necessary.
6460 We used to force a register so that loop.c could see it. But
6461 this does not allow gen_* patterns to perform optimizations with
6462 the constants. It also produces two insns in cases like "x = 1.0;".
6463 On most machines, floating-point constants are not permitted in
6464 many insns, so we'd end up copying it to a register in any case.
6466 Now, we do the copying in expand_binop, if appropriate. */
6467 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6468 TYPE_MODE (TREE_TYPE (exp)));
6471 /* Handle evaluating a complex constant in a CONCAT target. */
6472 if (original_target && GET_CODE (original_target) == CONCAT)
6474 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6477 rtarg = XEXP (original_target, 0);
6478 itarg = XEXP (original_target, 1);
6480 /* Move the real and imaginary parts separately. */
6481 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6482 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6485 emit_move_insn (rtarg, op0);
6487 emit_move_insn (itarg, op1);
6489 return original_target;
6492 /* ... fall through ... */
6495 temp = output_constant_def (exp, 1);
6497 /* temp contains a constant address.
6498 On RISC machines where a constant address isn't valid,
6499 make some insns to get that address into a register. */
6500 if (modifier != EXPAND_CONST_ADDRESS
6501 && modifier != EXPAND_INITIALIZER
6502 && modifier != EXPAND_SUM
6503 && (! memory_address_p (mode, XEXP (temp, 0))
6504 || flag_force_addr))
6505 return replace_equiv_address (temp,
6506 copy_rtx (XEXP (temp, 0)));
6509 case EXPR_WITH_FILE_LOCATION:
6512 struct file_stack fs;
6514 fs.location = input_location;
6515 fs.next = expr_wfl_stack;
6516 input_filename = EXPR_WFL_FILENAME (exp);
6517 input_line = EXPR_WFL_LINENO (exp);
6518 expr_wfl_stack = &fs;
6519 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6520 emit_line_note (input_location);
6521 /* Possibly avoid switching back and forth here. */
6522 to_return = expand_expr (EXPR_WFL_NODE (exp),
6523 (ignore ? const0_rtx : target),
6525 if (expr_wfl_stack != &fs)
6527 input_location = fs.location;
6528 expr_wfl_stack = fs.next;
6533 context = decl_function_context (exp);
6535 /* If this SAVE_EXPR was at global context, assume we are an
6536 initialization function and move it into our context. */
6538 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6540 /* We treat inline_function_decl as an alias for the current function
6541 because that is the inline function whose vars, types, etc.
6542 are being merged into the current function.
6543 See expand_inline_function. */
6544 if (context == current_function_decl || context == inline_function_decl)
6547 /* If this is non-local, handle it. */
6550 /* The following call just exists to abort if the context is
6551 not of a containing function. */
6552 find_function_data (context);
6554 temp = SAVE_EXPR_RTL (exp);
6555 if (temp && GET_CODE (temp) == REG)
6557 put_var_into_stack (exp, /*rescan=*/true);
6558 temp = SAVE_EXPR_RTL (exp);
6560 if (temp == 0 || GET_CODE (temp) != MEM)
6563 replace_equiv_address (temp,
6564 fix_lexical_addr (XEXP (temp, 0), exp));
6566 if (SAVE_EXPR_RTL (exp) == 0)
6568 if (mode == VOIDmode)
6571 temp = assign_temp (build_qualified_type (type,
6573 | TYPE_QUAL_CONST)),
6576 SAVE_EXPR_RTL (exp) = temp;
6577 if (!optimize && GET_CODE (temp) == REG)
6578 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6581 /* If the mode of TEMP does not match that of the expression, it
6582 must be a promoted value. We pass store_expr a SUBREG of the
6583 wanted mode but mark it so that we know that it was already
6586 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6588 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6589 promote_mode (type, mode, &unsignedp, 0);
6590 SUBREG_PROMOTED_VAR_P (temp) = 1;
6591 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6594 if (temp == const0_rtx)
6595 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6597 store_expr (TREE_OPERAND (exp, 0), temp,
6598 modifier == EXPAND_STACK_PARM ? 2 : 0);
6600 TREE_USED (exp) = 1;
6603 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6604 must be a promoted value. We return a SUBREG of the wanted mode,
6605 but mark it so that we know that it was already extended. */
6607 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6608 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6610 /* Compute the signedness and make the proper SUBREG. */
6611 promote_mode (type, mode, &unsignedp, 0);
6612 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6613 SUBREG_PROMOTED_VAR_P (temp) = 1;
6614 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6618 return SAVE_EXPR_RTL (exp);
6623 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6624 TREE_OPERAND (exp, 0)
6625 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6629 case PLACEHOLDER_EXPR:
6631 tree old_list = placeholder_list;
6632 tree placeholder_expr = 0;
6634 exp = find_placeholder (exp, &placeholder_expr);
6638 placeholder_list = TREE_CHAIN (placeholder_expr);
6639 temp = expand_expr (exp, original_target, tmode, modifier);
6640 placeholder_list = old_list;
6644 case WITH_RECORD_EXPR:
6645 /* Put the object on the placeholder list, expand our first operand,
6646 and pop the list. */
6647 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6649 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6651 placeholder_list = TREE_CHAIN (placeholder_list);
6655 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6656 expand_goto (TREE_OPERAND (exp, 0));
6658 expand_computed_goto (TREE_OPERAND (exp, 0));
6662 expand_exit_loop_if_false (NULL,
6663 invert_truthvalue (TREE_OPERAND (exp, 0)));
6666 case LABELED_BLOCK_EXPR:
6667 if (LABELED_BLOCK_BODY (exp))
6668 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6669 /* Should perhaps use expand_label, but this is simpler and safer. */
6670 do_pending_stack_adjust ();
6671 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6674 case EXIT_BLOCK_EXPR:
6675 if (EXIT_BLOCK_RETURN (exp))
6676 sorry ("returned value in block_exit_expr");
6677 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6682 expand_start_loop (1);
6683 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6691 tree vars = TREE_OPERAND (exp, 0);
6693 /* Need to open a binding contour here because
6694 if there are any cleanups they must be contained here. */
6695 expand_start_bindings (2);
6697 /* Mark the corresponding BLOCK for output in its proper place. */
6698 if (TREE_OPERAND (exp, 2) != 0
6699 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6700 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6702 /* If VARS have not yet been expanded, expand them now. */
6705 if (!DECL_RTL_SET_P (vars))
6707 expand_decl_init (vars);
6708 vars = TREE_CHAIN (vars);
6711 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6713 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6719 if (RTL_EXPR_SEQUENCE (exp))
6721 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6723 emit_insn (RTL_EXPR_SEQUENCE (exp));
6724 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6726 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6727 free_temps_for_rtl_expr (exp);
6729 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6730 return RTL_EXPR_RTL (exp);
6733 /* If we don't need the result, just ensure we evaluate any
6739 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6740 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6745 /* All elts simple constants => refer to a constant in memory. But
6746 if this is a non-BLKmode mode, let it store a field at a time
6747 since that should make a CONST_INT or CONST_DOUBLE when we
6748 fold. Likewise, if we have a target we can use, it is best to
6749 store directly into the target unless the type is large enough
6750 that memcpy will be used. If we are making an initializer and
6751 all operands are constant, put it in memory as well.
6753 FIXME: Avoid trying to fill vector constructors piece-meal.
6754 Output them with output_constant_def below unless we're sure
6755 they're zeros. This should go away when vector initializers
6756 are treated like VECTOR_CST instead of arrays.
6758 else if ((TREE_STATIC (exp)
6759 && ((mode == BLKmode
6760 && ! (target != 0 && safe_from_p (target, exp, 1)))
6761 || TREE_ADDRESSABLE (exp)
6762 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6763 && (! MOVE_BY_PIECES_P
6764 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6766 && ((TREE_CODE (type) == VECTOR_TYPE
6767 && !is_zeros_p (exp))
6768 || ! mostly_zeros_p (exp)))))
6769 || ((modifier == EXPAND_INITIALIZER
6770 || modifier == EXPAND_CONST_ADDRESS)
6771 && TREE_CONSTANT (exp)))
6773 rtx constructor = output_constant_def (exp, 1);
6775 if (modifier != EXPAND_CONST_ADDRESS
6776 && modifier != EXPAND_INITIALIZER
6777 && modifier != EXPAND_SUM)
6778 constructor = validize_mem (constructor);
6784 /* Handle calls that pass values in multiple non-contiguous
6785 locations. The Irix 6 ABI has examples of this. */
6786 if (target == 0 || ! safe_from_p (target, exp, 1)
6787 || GET_CODE (target) == PARALLEL
6788 || modifier == EXPAND_STACK_PARM)
6790 = assign_temp (build_qualified_type (type,
6792 | (TREE_READONLY (exp)
6793 * TYPE_QUAL_CONST))),
6794 0, TREE_ADDRESSABLE (exp), 1);
6796 store_constructor (exp, target, 0, int_expr_size (exp));
6802 tree exp1 = TREE_OPERAND (exp, 0);
6804 tree string = string_constant (exp1, &index);
6806 /* Try to optimize reads from const strings. */
6808 && TREE_CODE (string) == STRING_CST
6809 && TREE_CODE (index) == INTEGER_CST
6810 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6811 && GET_MODE_CLASS (mode) == MODE_INT
6812 && GET_MODE_SIZE (mode) == 1
6813 && modifier != EXPAND_WRITE)
6814 return gen_int_mode (TREE_STRING_POINTER (string)
6815 [TREE_INT_CST_LOW (index)], mode);
6817 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6818 op0 = memory_address (mode, op0);
6819 temp = gen_rtx_MEM (mode, op0);
6820 set_mem_attributes (temp, exp, 0);
6822 /* If we are writing to this object and its type is a record with
6823 readonly fields, we must mark it as readonly so it will
6824 conflict with readonly references to those fields. */
6825 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6826 RTX_UNCHANGING_P (temp) = 1;
6832 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6836 tree array = TREE_OPERAND (exp, 0);
6837 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6838 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6839 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6842 /* Optimize the special-case of a zero lower bound.
6844 We convert the low_bound to sizetype to avoid some problems
6845 with constant folding. (E.g. suppose the lower bound is 1,
6846 and its mode is QI. Without the conversion, (ARRAY
6847 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6848 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6850 if (! integer_zerop (low_bound))
6851 index = size_diffop (index, convert (sizetype, low_bound));
6853 /* Fold an expression like: "foo"[2].
6854 This is not done in fold so it won't happen inside &.
6855 Don't fold if this is for wide characters since it's too
6856 difficult to do correctly and this is a very rare case. */
6858 if (modifier != EXPAND_CONST_ADDRESS
6859 && modifier != EXPAND_INITIALIZER
6860 && modifier != EXPAND_MEMORY
6861 && TREE_CODE (array) == STRING_CST
6862 && TREE_CODE (index) == INTEGER_CST
6863 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6864 && GET_MODE_CLASS (mode) == MODE_INT
6865 && GET_MODE_SIZE (mode) == 1)
6866 return gen_int_mode (TREE_STRING_POINTER (array)
6867 [TREE_INT_CST_LOW (index)], mode);
6869 /* If this is a constant index into a constant array,
6870 just get the value from the array. Handle both the cases when
6871 we have an explicit constructor and when our operand is a variable
6872 that was declared const. */
6874 if (modifier != EXPAND_CONST_ADDRESS
6875 && modifier != EXPAND_INITIALIZER
6876 && modifier != EXPAND_MEMORY
6877 && TREE_CODE (array) == CONSTRUCTOR
6878 && ! TREE_SIDE_EFFECTS (array)
6879 && TREE_CODE (index) == INTEGER_CST
6880 && 0 > compare_tree_int (index,
6881 list_length (CONSTRUCTOR_ELTS
6882 (TREE_OPERAND (exp, 0)))))
6886 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6887 i = TREE_INT_CST_LOW (index);
6888 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6892 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6896 else if (optimize >= 1
6897 && modifier != EXPAND_CONST_ADDRESS
6898 && modifier != EXPAND_INITIALIZER
6899 && modifier != EXPAND_MEMORY
6900 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6901 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6902 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6903 && targetm.binds_local_p (array))
6905 if (TREE_CODE (index) == INTEGER_CST)
6907 tree init = DECL_INITIAL (array);
6909 if (TREE_CODE (init) == CONSTRUCTOR)
6913 for (elem = CONSTRUCTOR_ELTS (init);
6915 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6916 elem = TREE_CHAIN (elem))
6919 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6920 return expand_expr (fold (TREE_VALUE (elem)), target,
6923 else if (TREE_CODE (init) == STRING_CST
6924 && 0 > compare_tree_int (index,
6925 TREE_STRING_LENGTH (init)))
6927 tree type = TREE_TYPE (TREE_TYPE (init));
6928 enum machine_mode mode = TYPE_MODE (type);
6930 if (GET_MODE_CLASS (mode) == MODE_INT
6931 && GET_MODE_SIZE (mode) == 1)
6932 return gen_int_mode (TREE_STRING_POINTER (init)
6933 [TREE_INT_CST_LOW (index)], mode);
6938 goto normal_inner_ref;
6941 /* If the operand is a CONSTRUCTOR, we can just extract the
6942 appropriate field if it is present. */
6943 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6947 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6948 elt = TREE_CHAIN (elt))
6949 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6950 /* We can normally use the value of the field in the
6951 CONSTRUCTOR. However, if this is a bitfield in
6952 an integral mode that we can fit in a HOST_WIDE_INT,
6953 we must mask only the number of bits in the bitfield,
6954 since this is done implicitly by the constructor. If
6955 the bitfield does not meet either of those conditions,
6956 we can't do this optimization. */
6957 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6958 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6960 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6961 <= HOST_BITS_PER_WIDE_INT))))
6963 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6964 && modifier == EXPAND_STACK_PARM)
6966 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6967 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6969 HOST_WIDE_INT bitsize
6970 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6971 enum machine_mode imode
6972 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
6974 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6976 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6977 op0 = expand_and (imode, op0, op1, target);
6982 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6985 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6987 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6995 goto normal_inner_ref;
6998 case ARRAY_RANGE_REF:
7001 enum machine_mode mode1;
7002 HOST_WIDE_INT bitsize, bitpos;
7005 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7006 &mode1, &unsignedp, &volatilep);
7009 /* If we got back the original object, something is wrong. Perhaps
7010 we are evaluating an expression too early. In any event, don't
7011 infinitely recurse. */
7015 /* If TEM's type is a union of variable size, pass TARGET to the inner
7016 computation, since it will need a temporary and TARGET is known
7017 to have to do. This occurs in unchecked conversion in Ada. */
7021 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7022 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7024 && modifier != EXPAND_STACK_PARM
7025 ? target : NULL_RTX),
7027 (modifier == EXPAND_INITIALIZER
7028 || modifier == EXPAND_CONST_ADDRESS
7029 || modifier == EXPAND_STACK_PARM)
7030 ? modifier : EXPAND_NORMAL);
7032 /* If this is a constant, put it into a register if it is a
7033 legitimate constant and OFFSET is 0 and memory if it isn't. */
7034 if (CONSTANT_P (op0))
7036 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7037 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7039 op0 = force_reg (mode, op0);
7041 op0 = validize_mem (force_const_mem (mode, op0));
7044 /* Otherwise, if this object not in memory and we either have an
7045 offset or a BLKmode result, put it there. This case can't occur in
7046 C, but can in Ada if we have unchecked conversion of an expression
7047 from a scalar type to an array or record type or for an
7048 ARRAY_RANGE_REF whose type is BLKmode. */
7049 else if (GET_CODE (op0) != MEM
7051 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7053 /* If the operand is a SAVE_EXPR, we can deal with this by
7054 forcing the SAVE_EXPR into memory. */
7055 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7057 put_var_into_stack (TREE_OPERAND (exp, 0),
7059 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7064 = build_qualified_type (TREE_TYPE (tem),
7065 (TYPE_QUALS (TREE_TYPE (tem))
7066 | TYPE_QUAL_CONST));
7067 rtx memloc = assign_temp (nt, 1, 1, 1);
7069 emit_move_insn (memloc, op0);
7076 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7079 if (GET_CODE (op0) != MEM)
7082 #ifdef POINTERS_EXTEND_UNSIGNED
7083 if (GET_MODE (offset_rtx) != Pmode)
7084 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7086 if (GET_MODE (offset_rtx) != ptr_mode)
7087 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7090 if (GET_MODE (op0) == BLKmode
7091 /* A constant address in OP0 can have VOIDmode, we must
7092 not try to call force_reg in that case. */
7093 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7095 && (bitpos % bitsize) == 0
7096 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7097 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7099 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7103 op0 = offset_address (op0, offset_rtx,
7104 highest_pow2_factor (offset));
7107 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7108 record its alignment as BIGGEST_ALIGNMENT. */
7109 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7110 && is_aligning_offset (offset, tem))
7111 set_mem_align (op0, BIGGEST_ALIGNMENT);
7113 /* Don't forget about volatility even if this is a bitfield. */
7114 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7116 if (op0 == orig_op0)
7117 op0 = copy_rtx (op0);
7119 MEM_VOLATILE_P (op0) = 1;
7122 /* The following code doesn't handle CONCAT.
7123 Assume only bitpos == 0 can be used for CONCAT, due to
7124 one element arrays having the same mode as its element. */
7125 if (GET_CODE (op0) == CONCAT)
7127 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7132 /* In cases where an aligned union has an unaligned object
7133 as a field, we might be extracting a BLKmode value from
7134 an integer-mode (e.g., SImode) object. Handle this case
7135 by doing the extract into an object as wide as the field
7136 (which we know to be the width of a basic mode), then
7137 storing into memory, and changing the mode to BLKmode. */
7138 if (mode1 == VOIDmode
7139 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7140 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7141 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7142 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7143 && modifier != EXPAND_CONST_ADDRESS
7144 && modifier != EXPAND_INITIALIZER)
7145 /* If the field isn't aligned enough to fetch as a memref,
7146 fetch it as a bit field. */
7147 || (mode1 != BLKmode
7148 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7149 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7150 || (GET_CODE (op0) == MEM
7151 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7152 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7153 && ((modifier == EXPAND_CONST_ADDRESS
7154 || modifier == EXPAND_INITIALIZER)
7156 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7157 || (bitpos % BITS_PER_UNIT != 0)))
7158 /* If the type and the field are a constant size and the
7159 size of the type isn't the same size as the bitfield,
7160 we must use bitfield operations. */
7162 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7164 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7167 enum machine_mode ext_mode = mode;
7169 if (ext_mode == BLKmode
7170 && ! (target != 0 && GET_CODE (op0) == MEM
7171 && GET_CODE (target) == MEM
7172 && bitpos % BITS_PER_UNIT == 0))
7173 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7175 if (ext_mode == BLKmode)
7178 target = assign_temp (type, 0, 1, 1);
7183 /* In this case, BITPOS must start at a byte boundary and
7184 TARGET, if specified, must be a MEM. */
7185 if (GET_CODE (op0) != MEM
7186 || (target != 0 && GET_CODE (target) != MEM)
7187 || bitpos % BITS_PER_UNIT != 0)
7190 emit_block_move (target,
7191 adjust_address (op0, VOIDmode,
7192 bitpos / BITS_PER_UNIT),
7193 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7195 (modifier == EXPAND_STACK_PARM
7196 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7201 op0 = validize_mem (op0);
7203 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7204 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7206 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7207 (modifier == EXPAND_STACK_PARM
7208 ? NULL_RTX : target),
7210 int_size_in_bytes (TREE_TYPE (tem)));
7212 /* If the result is a record type and BITSIZE is narrower than
7213 the mode of OP0, an integral mode, and this is a big endian
7214 machine, we must put the field into the high-order bits. */
7215 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7216 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7217 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7218 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7219 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7223 if (mode == BLKmode)
7225 rtx new = assign_temp (build_qualified_type
7226 ((*lang_hooks.types.type_for_mode)
7228 TYPE_QUAL_CONST), 0, 1, 1);
7230 emit_move_insn (new, op0);
7231 op0 = copy_rtx (new);
7232 PUT_MODE (op0, BLKmode);
7233 set_mem_attributes (op0, exp, 1);
7239 /* If the result is BLKmode, use that to access the object
7241 if (mode == BLKmode)
7244 /* Get a reference to just this component. */
7245 if (modifier == EXPAND_CONST_ADDRESS
7246 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7247 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7249 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7251 if (op0 == orig_op0)
7252 op0 = copy_rtx (op0);
7254 set_mem_attributes (op0, exp, 0);
7255 if (GET_CODE (XEXP (op0, 0)) == REG)
7256 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7258 MEM_VOLATILE_P (op0) |= volatilep;
7259 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7260 || modifier == EXPAND_CONST_ADDRESS
7261 || modifier == EXPAND_INITIALIZER)
7263 else if (target == 0)
7264 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7266 convert_move (target, op0, unsignedp);
7272 rtx insn, before = get_last_insn (), vtbl_ref;
7274 /* Evaluate the interior expression. */
7275 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7278 /* Get or create an instruction off which to hang a note. */
7279 if (REG_P (subtarget))
7282 insn = get_last_insn ();
7285 if (! INSN_P (insn))
7286 insn = prev_nonnote_insn (insn);
7290 target = gen_reg_rtx (GET_MODE (subtarget));
7291 insn = emit_move_insn (target, subtarget);
7294 /* Collect the data for the note. */
7295 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7296 vtbl_ref = plus_constant (vtbl_ref,
7297 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7298 /* Discard the initial CONST that was added. */
7299 vtbl_ref = XEXP (vtbl_ref, 0);
7302 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7307 /* Intended for a reference to a buffer of a file-object in Pascal.
7308 But it's not certain that a special tree code will really be
7309 necessary for these. INDIRECT_REF might work for them. */
7315 /* Pascal set IN expression.
7318 rlo = set_low - (set_low%bits_per_word);
7319 the_word = set [ (index - rlo)/bits_per_word ];
7320 bit_index = index % bits_per_word;
7321 bitmask = 1 << bit_index;
7322 return !!(the_word & bitmask); */
7324 tree set = TREE_OPERAND (exp, 0);
7325 tree index = TREE_OPERAND (exp, 1);
7326 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7327 tree set_type = TREE_TYPE (set);
7328 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7329 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7330 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7331 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7332 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7333 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7334 rtx setaddr = XEXP (setval, 0);
7335 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7337 rtx diff, quo, rem, addr, bit, result;
7339 /* If domain is empty, answer is no. Likewise if index is constant
7340 and out of bounds. */
7341 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7342 && TREE_CODE (set_low_bound) == INTEGER_CST
7343 && tree_int_cst_lt (set_high_bound, set_low_bound))
7344 || (TREE_CODE (index) == INTEGER_CST
7345 && TREE_CODE (set_low_bound) == INTEGER_CST
7346 && tree_int_cst_lt (index, set_low_bound))
7347 || (TREE_CODE (set_high_bound) == INTEGER_CST
7348 && TREE_CODE (index) == INTEGER_CST
7349 && tree_int_cst_lt (set_high_bound, index))))
7353 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7355 /* If we get here, we have to generate the code for both cases
7356 (in range and out of range). */
7358 op0 = gen_label_rtx ();
7359 op1 = gen_label_rtx ();
7361 if (! (GET_CODE (index_val) == CONST_INT
7362 && GET_CODE (lo_r) == CONST_INT))
7363 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7364 GET_MODE (index_val), iunsignedp, op1);
7366 if (! (GET_CODE (index_val) == CONST_INT
7367 && GET_CODE (hi_r) == CONST_INT))
7368 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7369 GET_MODE (index_val), iunsignedp, op1);
7371 /* Calculate the element number of bit zero in the first word
7373 if (GET_CODE (lo_r) == CONST_INT)
7374 rlow = GEN_INT (INTVAL (lo_r)
7375 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7377 rlow = expand_binop (index_mode, and_optab, lo_r,
7378 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7379 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7381 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7382 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7384 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7385 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7386 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7387 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7389 addr = memory_address (byte_mode,
7390 expand_binop (index_mode, add_optab, diff,
7391 setaddr, NULL_RTX, iunsignedp,
7394 /* Extract the bit we want to examine. */
7395 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7396 gen_rtx_MEM (byte_mode, addr),
7397 make_tree (TREE_TYPE (index), rem),
7399 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7400 GET_MODE (target) == byte_mode ? target : 0,
7401 1, OPTAB_LIB_WIDEN);
7403 if (result != target)
7404 convert_move (target, result, 1);
7406 /* Output the code to handle the out-of-range case. */
7409 emit_move_insn (target, const0_rtx);
7414 case WITH_CLEANUP_EXPR:
7415 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7417 WITH_CLEANUP_EXPR_RTL (exp)
7418 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7419 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7420 CLEANUP_EH_ONLY (exp));
7422 /* That's it for this cleanup. */
7423 TREE_OPERAND (exp, 1) = 0;
7425 return WITH_CLEANUP_EXPR_RTL (exp);
7427 case CLEANUP_POINT_EXPR:
7429 /* Start a new binding layer that will keep track of all cleanup
7430 actions to be performed. */
7431 expand_start_bindings (2);
7433 target_temp_slot_level = temp_slot_level;
7435 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7436 /* If we're going to use this value, load it up now. */
7438 op0 = force_not_mem (op0);
7439 preserve_temp_slots (op0);
7440 expand_end_bindings (NULL_TREE, 0, 0);
7445 /* Check for a built-in function. */
7446 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7447 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7449 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7451 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7452 == BUILT_IN_FRONTEND)
7453 return (*lang_hooks.expand_expr) (exp, original_target,
7457 return expand_builtin (exp, target, subtarget, tmode, ignore);
7460 return expand_call (exp, target, ignore);
7462 case NON_LVALUE_EXPR:
7465 case REFERENCE_EXPR:
7466 if (TREE_OPERAND (exp, 0) == error_mark_node)
7469 if (TREE_CODE (type) == UNION_TYPE)
7471 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7473 /* If both input and output are BLKmode, this conversion isn't doing
7474 anything except possibly changing memory attribute. */
7475 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7477 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7480 result = copy_rtx (result);
7481 set_mem_attributes (result, exp, 0);
7487 if (TYPE_MODE (type) != BLKmode)
7488 target = gen_reg_rtx (TYPE_MODE (type));
7490 target = assign_temp (type, 0, 1, 1);
7493 if (GET_CODE (target) == MEM)
7494 /* Store data into beginning of memory target. */
7495 store_expr (TREE_OPERAND (exp, 0),
7496 adjust_address (target, TYPE_MODE (valtype), 0),
7497 modifier == EXPAND_STACK_PARM ? 2 : 0);
7499 else if (GET_CODE (target) == REG)
7500 /* Store this field into a union of the proper type. */
7501 store_field (target,
7502 MIN ((int_size_in_bytes (TREE_TYPE
7503 (TREE_OPERAND (exp, 0)))
7505 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7506 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7507 VOIDmode, 0, type, 0);
7511 /* Return the entire union. */
7515 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7517 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7520 /* If the signedness of the conversion differs and OP0 is
7521 a promoted SUBREG, clear that indication since we now
7522 have to do the proper extension. */
7523 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7524 && GET_CODE (op0) == SUBREG)
7525 SUBREG_PROMOTED_VAR_P (op0) = 0;
7530 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7531 if (GET_MODE (op0) == mode)
7534 /* If OP0 is a constant, just convert it into the proper mode. */
7535 if (CONSTANT_P (op0))
7537 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7538 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7540 if (modifier == EXPAND_INITIALIZER)
7541 return simplify_gen_subreg (mode, op0, inner_mode,
7542 subreg_lowpart_offset (mode,
7545 return convert_modes (mode, inner_mode, op0,
7546 TREE_UNSIGNED (inner_type));
7549 if (modifier == EXPAND_INITIALIZER)
7550 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7554 convert_to_mode (mode, op0,
7555 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7557 convert_move (target, op0,
7558 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7561 case VIEW_CONVERT_EXPR:
7562 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7564 /* If the input and output modes are both the same, we are done.
7565 Otherwise, if neither mode is BLKmode and both are integral and within
7566 a word, we can use gen_lowpart. If neither is true, make sure the
7567 operand is in memory and convert the MEM to the new mode. */
7568 if (TYPE_MODE (type) == GET_MODE (op0))
7570 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7571 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7572 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7573 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7574 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7575 op0 = gen_lowpart (TYPE_MODE (type), op0);
7576 else if (GET_CODE (op0) != MEM)
7578 /* If the operand is not a MEM, force it into memory. Since we
7579 are going to be be changing the mode of the MEM, don't call
7580 force_const_mem for constants because we don't allow pool
7581 constants to change mode. */
7582 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7584 if (TREE_ADDRESSABLE (exp))
7587 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7589 = assign_stack_temp_for_type
7590 (TYPE_MODE (inner_type),
7591 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7593 emit_move_insn (target, op0);
7597 /* At this point, OP0 is in the correct mode. If the output type is such
7598 that the operand is known to be aligned, indicate that it is.
7599 Otherwise, we need only be concerned about alignment for non-BLKmode
7601 if (GET_CODE (op0) == MEM)
7603 op0 = copy_rtx (op0);
7605 if (TYPE_ALIGN_OK (type))
7606 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7607 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7608 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7610 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7611 HOST_WIDE_INT temp_size
7612 = MAX (int_size_in_bytes (inner_type),
7613 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7614 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7615 temp_size, 0, type);
7616 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7618 if (TREE_ADDRESSABLE (exp))
7621 if (GET_MODE (op0) == BLKmode)
7622 emit_block_move (new_with_op0_mode, op0,
7623 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7624 (modifier == EXPAND_STACK_PARM
7625 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7627 emit_move_insn (new_with_op0_mode, op0);
7632 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7638 this_optab = ! unsignedp && flag_trapv
7639 && (GET_MODE_CLASS (mode) == MODE_INT)
7640 ? addv_optab : add_optab;
7642 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7643 something else, make sure we add the register to the constant and
7644 then to the other thing. This case can occur during strength
7645 reduction and doing it this way will produce better code if the
7646 frame pointer or argument pointer is eliminated.
7648 fold-const.c will ensure that the constant is always in the inner
7649 PLUS_EXPR, so the only case we need to do anything about is if
7650 sp, ap, or fp is our second argument, in which case we must swap
7651 the innermost first argument and our second argument. */
7653 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7654 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7655 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7656 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7657 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7658 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7660 tree t = TREE_OPERAND (exp, 1);
7662 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7663 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7666 /* If the result is to be ptr_mode and we are adding an integer to
7667 something, we might be forming a constant. So try to use
7668 plus_constant. If it produces a sum and we can't accept it,
7669 use force_operand. This allows P = &ARR[const] to generate
7670 efficient code on machines where a SYMBOL_REF is not a valid
7673 If this is an EXPAND_SUM call, always return the sum. */
7674 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7675 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7677 if (modifier == EXPAND_STACK_PARM)
7679 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7680 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7681 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7685 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7687 /* Use immed_double_const to ensure that the constant is
7688 truncated according to the mode of OP1, then sign extended
7689 to a HOST_WIDE_INT. Using the constant directly can result
7690 in non-canonical RTL in a 64x32 cross compile. */
7692 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7694 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7695 op1 = plus_constant (op1, INTVAL (constant_part));
7696 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7697 op1 = force_operand (op1, target);
7701 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7702 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7703 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7707 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7708 (modifier == EXPAND_INITIALIZER
7709 ? EXPAND_INITIALIZER : EXPAND_SUM));
7710 if (! CONSTANT_P (op0))
7712 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7713 VOIDmode, modifier);
7714 /* Return a PLUS if modifier says it's OK. */
7715 if (modifier == EXPAND_SUM
7716 || modifier == EXPAND_INITIALIZER)
7717 return simplify_gen_binary (PLUS, mode, op0, op1);
7720 /* Use immed_double_const to ensure that the constant is
7721 truncated according to the mode of OP1, then sign extended
7722 to a HOST_WIDE_INT. Using the constant directly can result
7723 in non-canonical RTL in a 64x32 cross compile. */
7725 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7727 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7728 op0 = plus_constant (op0, INTVAL (constant_part));
7729 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7730 op0 = force_operand (op0, target);
7735 /* No sense saving up arithmetic to be done
7736 if it's all in the wrong mode to form part of an address.
7737 And force_operand won't know whether to sign-extend or
7739 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7740 || mode != ptr_mode)
7742 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7743 subtarget, &op0, &op1, 0);
7744 if (op0 == const0_rtx)
7746 if (op1 == const0_rtx)
7751 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7752 subtarget, &op0, &op1, modifier);
7753 return simplify_gen_binary (PLUS, mode, op0, op1);
7756 /* For initializers, we are allowed to return a MINUS of two
7757 symbolic constants. Here we handle all cases when both operands
7759 /* Handle difference of two symbolic constants,
7760 for the sake of an initializer. */
7761 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7762 && really_constant_p (TREE_OPERAND (exp, 0))
7763 && really_constant_p (TREE_OPERAND (exp, 1)))
7765 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7766 NULL_RTX, &op0, &op1, modifier);
7768 /* If the last operand is a CONST_INT, use plus_constant of
7769 the negated constant. Else make the MINUS. */
7770 if (GET_CODE (op1) == CONST_INT)
7771 return plus_constant (op0, - INTVAL (op1));
7773 return gen_rtx_MINUS (mode, op0, op1);
7776 this_optab = ! unsignedp && flag_trapv
7777 && (GET_MODE_CLASS(mode) == MODE_INT)
7778 ? subv_optab : sub_optab;
7780 /* No sense saving up arithmetic to be done
7781 if it's all in the wrong mode to form part of an address.
7782 And force_operand won't know whether to sign-extend or
7784 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7785 || mode != ptr_mode)
7788 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7789 subtarget, &op0, &op1, modifier);
7791 /* Convert A - const to A + (-const). */
7792 if (GET_CODE (op1) == CONST_INT)
7794 op1 = negate_rtx (mode, op1);
7795 return simplify_gen_binary (PLUS, mode, op0, op1);
7801 /* If first operand is constant, swap them.
7802 Thus the following special case checks need only
7803 check the second operand. */
7804 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7806 tree t1 = TREE_OPERAND (exp, 0);
7807 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7808 TREE_OPERAND (exp, 1) = t1;
7811 /* Attempt to return something suitable for generating an
7812 indexed address, for machines that support that. */
7814 if (modifier == EXPAND_SUM && mode == ptr_mode
7815 && host_integerp (TREE_OPERAND (exp, 1), 0))
7817 tree exp1 = TREE_OPERAND (exp, 1);
7819 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7822 if (GET_CODE (op0) != REG)
7823 op0 = force_operand (op0, NULL_RTX);
7824 if (GET_CODE (op0) != REG)
7825 op0 = copy_to_mode_reg (mode, op0);
7827 return gen_rtx_MULT (mode, op0,
7828 gen_int_mode (tree_low_cst (exp1, 0),
7829 TYPE_MODE (TREE_TYPE (exp1))));
7832 if (modifier == EXPAND_STACK_PARM)
7835 /* Check for multiplying things that have been extended
7836 from a narrower type. If this machine supports multiplying
7837 in that narrower type with a result in the desired type,
7838 do it that way, and avoid the explicit type-conversion. */
7839 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7840 && TREE_CODE (type) == INTEGER_TYPE
7841 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7842 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7843 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7844 && int_fits_type_p (TREE_OPERAND (exp, 1),
7845 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7846 /* Don't use a widening multiply if a shift will do. */
7847 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7848 > HOST_BITS_PER_WIDE_INT)
7849 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7851 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7852 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7854 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7855 /* If both operands are extended, they must either both
7856 be zero-extended or both be sign-extended. */
7857 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7859 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7861 enum machine_mode innermode
7862 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7863 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7864 ? smul_widen_optab : umul_widen_optab);
7865 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7866 ? umul_widen_optab : smul_widen_optab);
7867 if (mode == GET_MODE_WIDER_MODE (innermode))
7869 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7871 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7872 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7873 TREE_OPERAND (exp, 1),
7874 NULL_RTX, &op0, &op1, 0);
7876 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7877 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7878 NULL_RTX, &op0, &op1, 0);
7881 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7882 && innermode == word_mode)
7885 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7886 NULL_RTX, VOIDmode, 0);
7887 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7888 op1 = convert_modes (innermode, mode,
7889 expand_expr (TREE_OPERAND (exp, 1),
7890 NULL_RTX, VOIDmode, 0),
7893 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7894 NULL_RTX, VOIDmode, 0);
7895 temp = expand_binop (mode, other_optab, op0, op1, target,
7896 unsignedp, OPTAB_LIB_WIDEN);
7897 htem = expand_mult_highpart_adjust (innermode,
7898 gen_highpart (innermode, temp),
7900 gen_highpart (innermode, temp),
7902 emit_move_insn (gen_highpart (innermode, temp), htem);
7907 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7908 subtarget, &op0, &op1, 0);
7909 return expand_mult (mode, op0, op1, target, unsignedp);
7911 case TRUNC_DIV_EXPR:
7912 case FLOOR_DIV_EXPR:
7914 case ROUND_DIV_EXPR:
7915 case EXACT_DIV_EXPR:
7916 if (modifier == EXPAND_STACK_PARM)
7918 /* Possible optimization: compute the dividend with EXPAND_SUM
7919 then if the divisor is constant can optimize the case
7920 where some terms of the dividend have coeffs divisible by it. */
7921 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7922 subtarget, &op0, &op1, 0);
7923 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7926 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
7927 expensive divide. If not, combine will rebuild the original
7929 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7930 && TREE_CODE (type) == REAL_TYPE
7931 && !real_onep (TREE_OPERAND (exp, 0)))
7932 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7933 build (RDIV_EXPR, type,
7934 build_real (type, dconst1),
7935 TREE_OPERAND (exp, 1))),
7936 target, tmode, modifier);
7937 this_optab = sdiv_optab;
7940 case TRUNC_MOD_EXPR:
7941 case FLOOR_MOD_EXPR:
7943 case ROUND_MOD_EXPR:
7944 if (modifier == EXPAND_STACK_PARM)
7946 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7947 subtarget, &op0, &op1, 0);
7948 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7950 case FIX_ROUND_EXPR:
7951 case FIX_FLOOR_EXPR:
7953 abort (); /* Not used for C. */
7955 case FIX_TRUNC_EXPR:
7956 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7957 if (target == 0 || modifier == EXPAND_STACK_PARM)
7958 target = gen_reg_rtx (mode);
7959 expand_fix (target, op0, unsignedp);
7963 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7964 if (target == 0 || modifier == EXPAND_STACK_PARM)
7965 target = gen_reg_rtx (mode);
7966 /* expand_float can't figure out what to do if FROM has VOIDmode.
7967 So give it the correct mode. With -O, cse will optimize this. */
7968 if (GET_MODE (op0) == VOIDmode)
7969 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7971 expand_float (target, op0,
7972 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7977 if (modifier == EXPAND_STACK_PARM)
7979 temp = expand_unop (mode,
7980 ! unsignedp && flag_trapv
7981 && (GET_MODE_CLASS(mode) == MODE_INT)
7982 ? negv_optab : neg_optab, op0, target, 0);
7988 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7989 if (modifier == EXPAND_STACK_PARM)
7992 /* ABS_EXPR is not valid for complex arguments. */
7993 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7994 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7997 /* Unsigned abs is simply the operand. Testing here means we don't
7998 risk generating incorrect code below. */
7999 if (TREE_UNSIGNED (type))
8002 return expand_abs (mode, op0, target, unsignedp,
8003 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8007 target = original_target;
8009 || modifier == EXPAND_STACK_PARM
8010 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8011 || GET_MODE (target) != mode
8012 || (GET_CODE (target) == REG
8013 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8014 target = gen_reg_rtx (mode);
8015 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8016 target, &op0, &op1, 0);
/* First try to do it with a special MIN or MAX instruction.
If that does not win, use a conditional jump to select the proper
value.  */
8021 this_optab = (TREE_UNSIGNED (type)
8022 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8023 : (code == MIN_EXPR ? smin_optab : smax_optab));
8025 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
/* At this point, a MEM target is no longer useful; we will get better
code without it.  */
8033 if (GET_CODE (target) == MEM)
8034 target = gen_reg_rtx (mode);
8036 /* If op1 was placed in target, swap op0 and op1. */
8037 if (target != op0 && target == op1)
8045 emit_move_insn (target, op0);
8047 op0 = gen_label_rtx ();
8049 /* If this mode is an integer too wide to compare properly,
8050 compare word by word. Rely on cse to optimize constant cases. */
8051 if (GET_MODE_CLASS (mode) == MODE_INT
8052 && ! can_compare_p (GE, mode, ccp_jump))
8054 if (code == MAX_EXPR)
8055 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8056 target, op1, NULL_RTX, op0);
8058 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8059 op1, target, NULL_RTX, op0);
8063 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8064 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8065 unsignedp, mode, NULL_RTX, NULL_RTX,
8068 emit_move_insn (target, op1);
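/* A sketch of the fallback sequence for MAX_EXPR when no min/max
   instruction exists (MIN_EXPR uses LE instead of GE):

     target = op0;
     if (target >= op1) goto done;
     target = op1;
   done:
*/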
8073 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8074 if (modifier == EXPAND_STACK_PARM)
8076 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8081 /* ??? Can optimize bitwise operations with one arg constant.
8082 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8083 and (a bitwise1 b) bitwise2 b (etc)
but that is probably not worthwhile.  */
8086 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8087 boolean values when we want in all cases to compute both of them. In
8088 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8089 as actual zero-or-1 values and then bitwise anding. In cases where
8090 there cannot be any side effects, better code would be made by
8091 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8092 how to recognize those cases. */
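/* For example, with both operands already reduced to 0 or 1,

     r = (a != 0) & (b != 0);

   (TRUTH_AND_EXPR) evaluates both comparisons and ANDs the two 0/1
   values, while

     r = (a != 0) && (b != 0);

   (TRUTH_ANDIF_EXPR) must branch so that B is not evaluated when A
   is false.  */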
8094 case TRUTH_AND_EXPR:
8096 this_optab = and_optab;
8101 this_optab = ior_optab;
8104 case TRUTH_XOR_EXPR:
8106 this_optab = xor_optab;
8113 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8115 if (modifier == EXPAND_STACK_PARM)
8117 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8118 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8121 /* Could determine the answer when only additive constants differ. Also,
8122 the addition of one can be handled by changing the condition. */
8129 case UNORDERED_EXPR:
8136 temp = do_store_flag (exp,
8137 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8138 tmode != VOIDmode ? tmode : mode, 0);
8142 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8143 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8145 && GET_CODE (original_target) == REG
8146 && (GET_MODE (original_target)
8147 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8149 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8152 /* If temp is constant, we can just compute the result. */
8153 if (GET_CODE (temp) == CONST_INT)
8155 if (INTVAL (temp) != 0)
8156 emit_move_insn (target, const1_rtx);
8158 emit_move_insn (target, const0_rtx);
8163 if (temp != original_target)
8165 enum machine_mode mode1 = GET_MODE (temp);
8166 if (mode1 == VOIDmode)
8167 mode1 = tmode != VOIDmode ? tmode : mode;
8169 temp = copy_to_mode_reg (mode1, temp);
8172 op1 = gen_label_rtx ();
8173 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8174 GET_MODE (temp), unsignedp, op1);
8175 emit_move_insn (temp, const1_rtx);
8180 /* If no set-flag instruction, must generate a conditional
8181 store into a temporary variable. Drop through
8182 and handle this like && and ||. */
8184 case TRUTH_ANDIF_EXPR:
8185 case TRUTH_ORIF_EXPR:
8188 || modifier == EXPAND_STACK_PARM
8189 || ! safe_from_p (target, exp, 1)
8190 /* Make sure we don't have a hard reg (such as function's return
8191 value) live across basic blocks, if not optimizing. */
8192 || (!optimize && GET_CODE (target) == REG
8193 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8194 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8197 emit_clr_insn (target);
8199 op1 = gen_label_rtx ();
8200 jumpifnot (exp, op1);
8203 emit_0_to_1_insn (target);
8206 return ignore ? const0_rtx : target;
8208 case TRUTH_NOT_EXPR:
8209 if (modifier == EXPAND_STACK_PARM)
8211 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8212 /* The parser is careful to generate TRUTH_NOT_EXPR
8213 only with operands that are always zero or one. */
8214 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8215 target, 1, OPTAB_LIB_WIDEN);
8221 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8223 return expand_expr_real (TREE_OPERAND (exp, 1),
8224 (ignore ? const0_rtx : target),
8225 VOIDmode, modifier, alt_rtl);
8228 /* If we would have a "singleton" (see below) were it not for a
8229 conversion in each arm, bring that conversion back out. */
8230 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8231 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8232 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8233 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8235 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8236 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8238 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8239 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8240 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8241 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8242 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8243 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8244 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8245 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8246 return expand_expr (build1 (NOP_EXPR, type,
8247 build (COND_EXPR, TREE_TYPE (iftrue),
8248 TREE_OPERAND (exp, 0),
8250 target, tmode, modifier);
8254 /* Note that COND_EXPRs whose type is a structure or union
8255 are required to be constructed to contain assignments of
8256 a temporary variable, so that we can evaluate them here
8257 for side effect only. If type is void, we must do likewise. */
8259 /* If an arm of the branch requires a cleanup,
8260 only that cleanup is performed. */
8263 tree binary_op = 0, unary_op = 0;
8265 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8266 convert it to our mode, if necessary. */
8267 if (integer_onep (TREE_OPERAND (exp, 1))
8268 && integer_zerop (TREE_OPERAND (exp, 2))
8269 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8273 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8278 if (modifier == EXPAND_STACK_PARM)
8280 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8281 if (GET_MODE (op0) == mode)
8285 target = gen_reg_rtx (mode);
8286 convert_move (target, op0, unsignedp);
8290 /* Check for X ? A + B : A. If we have this, we can copy A to the
8291 output and conditionally add B. Similarly for unary operations.
8292 Don't do this if X has side-effects because those side effects
8293 might affect A or B and the "?" operation is a sequence point in
8294 ANSI. (operand_equal_p tests for side effects.) */
8296 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8297 && operand_equal_p (TREE_OPERAND (exp, 2),
8298 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8299 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8300 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8301 && operand_equal_p (TREE_OPERAND (exp, 1),
8302 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8303 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8304 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8305 && operand_equal_p (TREE_OPERAND (exp, 2),
8306 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8307 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8308 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8309 && operand_equal_p (TREE_OPERAND (exp, 1),
8310 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8311 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8313 /* If we are not to produce a result, we have no target. Otherwise,
8314 if a target was specified use it; it will not be used as an
8315 intermediate target unless it is safe. If no target, use a
8320 else if (modifier == EXPAND_STACK_PARM)
8321 temp = assign_temp (type, 0, 0, 1);
8322 else if (original_target
8323 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8324 || (singleton && GET_CODE (original_target) == REG
8325 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8326 && original_target == var_rtx (singleton)))
8327 && GET_MODE (original_target) == mode
8328 #ifdef HAVE_conditional_move
8329 && (! can_conditionally_move_p (mode)
8330 || GET_CODE (original_target) == REG
8331 || TREE_ADDRESSABLE (type))
8333 && (GET_CODE (original_target) != MEM
8334 || TREE_ADDRESSABLE (type)))
8335 temp = original_target;
8336 else if (TREE_ADDRESSABLE (type))
8339 temp = assign_temp (type, 0, 0, 1);
8341 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8342 do the test of X as a store-flag operation, do this as
8343 A + ((X != 0) << log C). Similarly for other simple binary
8344 operators. Only do for C == 1 if BRANCH_COST is low. */
8345 if (temp && singleton && binary_op
8346 && (TREE_CODE (binary_op) == PLUS_EXPR
8347 || TREE_CODE (binary_op) == MINUS_EXPR
8348 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8349 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8350 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8351 : integer_onep (TREE_OPERAND (binary_op, 1)))
8352 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8356 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8357 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8358 ? addv_optab : add_optab)
8359 : TREE_CODE (binary_op) == MINUS_EXPR
8360 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8361 ? subv_optab : sub_optab)
8362 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8365 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8366 if (singleton == TREE_OPERAND (exp, 1))
8367 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8369 cond = TREE_OPERAND (exp, 0);
8371 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8373 mode, BRANCH_COST <= 1);
8375 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8376 result = expand_shift (LSHIFT_EXPR, mode, result,
8377 build_int_2 (tree_log2
8381 (safe_from_p (temp, singleton, 1)
8382 ? temp : NULL_RTX), 0);
8386 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8387 return expand_binop (mode, boptab, op1, result, temp,
8388 unsignedp, OPTAB_LIB_WIDEN);
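/* Source-level sketches of the transformation above (the constants
   are illustrative):

     y ? a + 4 : a    becomes    a + ((y != 0) << 2)
     y ? a : a + 1    becomes    a + (y == 0)

   do_store_flag materializes the 0/1 condition value; the shift is
   needed only when the constant is a power of 2 other than 1.  */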
8392 do_pending_stack_adjust ();
8394 op0 = gen_label_rtx ();
8396 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8400 /* If the target conflicts with the other operand of the
8401 binary op, we can't use it. Also, we can't use the target
8402 if it is a hard register, because evaluating the condition
8403 might clobber it. */
8405 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8406 || (GET_CODE (temp) == REG
8407 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8408 temp = gen_reg_rtx (mode);
8409 store_expr (singleton, temp,
8410 modifier == EXPAND_STACK_PARM ? 2 : 0);
8413 expand_expr (singleton,
8414 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8415 if (singleton == TREE_OPERAND (exp, 1))
8416 jumpif (TREE_OPERAND (exp, 0), op0);
8418 jumpifnot (TREE_OPERAND (exp, 0), op0);
8420 start_cleanup_deferral ();
8421 if (binary_op && temp == 0)
8422 /* Just touch the other operand. */
8423 expand_expr (TREE_OPERAND (binary_op, 1),
8424 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8426 store_expr (build (TREE_CODE (binary_op), type,
8427 make_tree (type, temp),
8428 TREE_OPERAND (binary_op, 1)),
8429 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8431 store_expr (build1 (TREE_CODE (unary_op), type,
8432 make_tree (type, temp)),
8433 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8436 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8437 comparison operator. If we have one of these cases, set the
8438 output to A, branch on A (cse will merge these two references),
8439 then set the output to FOO. */
8441 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8442 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8443 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8444 TREE_OPERAND (exp, 1), 0)
8445 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8446 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8447 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8449 if (GET_CODE (temp) == REG
8450 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8451 temp = gen_reg_rtx (mode);
8452 store_expr (TREE_OPERAND (exp, 1), temp,
8453 modifier == EXPAND_STACK_PARM ? 2 : 0);
8454 jumpif (TREE_OPERAND (exp, 0), op0);
8456 start_cleanup_deferral ();
8457 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8458 store_expr (TREE_OPERAND (exp, 2), temp,
8459 modifier == EXPAND_STACK_PARM ? 2 : 0);
8461 expand_expr (TREE_OPERAND (exp, 2),
8462 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8466 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8467 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8468 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8469 TREE_OPERAND (exp, 2), 0)
8470 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8471 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8472 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8474 if (GET_CODE (temp) == REG
8475 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8476 temp = gen_reg_rtx (mode);
8477 store_expr (TREE_OPERAND (exp, 2), temp,
8478 modifier == EXPAND_STACK_PARM ? 2 : 0);
8479 jumpifnot (TREE_OPERAND (exp, 0), op0);
8481 start_cleanup_deferral ();
8482 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8483 store_expr (TREE_OPERAND (exp, 1), temp,
8484 modifier == EXPAND_STACK_PARM ? 2 : 0);
8486 expand_expr (TREE_OPERAND (exp, 1),
8487 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8492 op1 = gen_label_rtx ();
8493 jumpifnot (TREE_OPERAND (exp, 0), op0);
8495 start_cleanup_deferral ();
/* One branch of the cond can be void, if it never returns.  For
example, A ? throw : E.  */
8500 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8501 store_expr (TREE_OPERAND (exp, 1), temp,
8502 modifier == EXPAND_STACK_PARM ? 2 : 0);
8504 expand_expr (TREE_OPERAND (exp, 1),
8505 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8506 end_cleanup_deferral ();
8508 emit_jump_insn (gen_jump (op1));
8511 start_cleanup_deferral ();
8513 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8514 store_expr (TREE_OPERAND (exp, 2), temp,
8515 modifier == EXPAND_STACK_PARM ? 2 : 0);
8517 expand_expr (TREE_OPERAND (exp, 2),
8518 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8521 end_cleanup_deferral ();
8532 /* Something needs to be initialized, but we didn't know
8533 where that thing was when building the tree. For example,
8534 it could be the return value of a function, or a parameter
8535 to a function which lays down in the stack, or a temporary
8536 variable which must be passed by reference.
8538 We guarantee that the expression will either be constructed
8539 or copied into our original target. */
8541 tree slot = TREE_OPERAND (exp, 0);
8542 tree cleanups = NULL_TREE;
8545 if (TREE_CODE (slot) != VAR_DECL)
8549 target = original_target;
8551 /* Set this here so that if we get a target that refers to a
8552 register variable that's already been used, put_reg_into_stack
8553 knows that it should fix up those uses. */
8554 TREE_USED (slot) = 1;
8558 if (DECL_RTL_SET_P (slot))
8560 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do
anything else.  */
8563 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8568 target = assign_temp (type, 2, 0, 1);
8569 /* All temp slots at this level must not conflict. */
8570 preserve_temp_slots (target);
8571 SET_DECL_RTL (slot, target);
8572 if (TREE_ADDRESSABLE (slot))
8573 put_var_into_stack (slot, /*rescan=*/false);
8575 /* Since SLOT is not known to the called function
8576 to belong to its stack frame, we must build an explicit
8577 cleanup. This case occurs when we must build up a reference
8578 to pass the reference as an argument. In this case,
it is very likely that such a reference need not be built here.  */
8582 if (TREE_OPERAND (exp, 2) == 0)
8583 TREE_OPERAND (exp, 2)
8584 = (*lang_hooks.maybe_build_cleanup) (slot);
8585 cleanups = TREE_OPERAND (exp, 2);
8590 /* This case does occur, when expanding a parameter which
8591 needs to be constructed on the stack. The target
8592 is the actual stack address that we want to initialize.
8593 The function we call will perform the cleanup in this case. */
8595 /* If we have already assigned it space, use that space,
8596 not target that we were passed in, as our target
8597 parameter is only a hint. */
8598 if (DECL_RTL_SET_P (slot))
8600 target = DECL_RTL (slot);
/* We have already expanded the slot, so don't do
anything else.  */
8603 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8608 SET_DECL_RTL (slot, target);
8609 /* If we must have an addressable slot, then make sure that
8610 the RTL that we just stored in slot is OK. */
8611 if (TREE_ADDRESSABLE (slot))
8612 put_var_into_stack (slot, /*rescan=*/true);
8616 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8617 /* Mark it as expanded. */
8618 TREE_OPERAND (exp, 1) = NULL_TREE;
8620 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8622 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8629 tree lhs = TREE_OPERAND (exp, 0);
8630 tree rhs = TREE_OPERAND (exp, 1);
8632 temp = expand_assignment (lhs, rhs, ! ignore);
8638 /* If lhs is complex, expand calls in rhs before computing it.
8639 That's so we don't compute a pointer and save it over a
8640 call. If lhs is simple, compute it first so we can give it
8641 as a target if the rhs is just a call. This avoids an
8642 extra temp and copy and that prevents a partial-subsumption
8643 which makes bad code. Actually we could treat
8644 component_ref's of vars like vars. */
8646 tree lhs = TREE_OPERAND (exp, 0);
8647 tree rhs = TREE_OPERAND (exp, 1);
8651 /* Check for |= or &= of a bitfield of size one into another bitfield
8652 of size 1. In this case, (unless we need the result of the
8653 assignment) we can do this more efficiently with a
8654 test followed by an assignment, if necessary.
??? At this point, we can't get a BIT_FIELD_REF here.  But if
things change so we do, this code should be enhanced to
support it.  */
8660 && TREE_CODE (lhs) == COMPONENT_REF
8661 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8662 || TREE_CODE (rhs) == BIT_AND_EXPR)
8663 && TREE_OPERAND (rhs, 0) == lhs
8664 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8665 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8666 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8668 rtx label = gen_label_rtx ();
8670 do_jump (TREE_OPERAND (rhs, 1),
8671 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8672 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8673 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8674 (TREE_CODE (rhs) == BIT_IOR_EXPR
8676 : integer_zero_node)),
8678 do_pending_stack_adjust ();
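/* A sketch of what the shortcut above emits, for one-bit fields
   (the names are illustrative):

     s.f |= t.g;     becomes     if (t.g) s.f = 1;
     s.f &= t.g;     becomes     if (! t.g) s.f = 0;

   replacing a read-modify-write of the destination bit-field with a
   test and a conditional store.  */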
8683 temp = expand_assignment (lhs, rhs, ! ignore);
8689 if (!TREE_OPERAND (exp, 0))
8690 expand_null_return ();
8692 expand_return (TREE_OPERAND (exp, 0));
8695 case PREINCREMENT_EXPR:
8696 case PREDECREMENT_EXPR:
8697 return expand_increment (exp, 0, ignore);
8699 case POSTINCREMENT_EXPR:
8700 case POSTDECREMENT_EXPR:
8701 /* Faster to treat as pre-increment if result is not used. */
8702 return expand_increment (exp, ! ignore, ignore);
8705 if (modifier == EXPAND_STACK_PARM)
8707 /* Are we taking the address of a nested function? */
8708 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8709 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8710 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8711 && ! TREE_STATIC (exp))
8713 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8714 op0 = force_operand (op0, target);
/* If we are taking the address of something erroneous, just
use a zero.  */
8718 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8720 /* If we are taking the address of a constant and are at the
8721 top level, we have to use output_constant_def since we can't
8722 call force_const_mem at top level. */
8724 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8725 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8727 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8730 /* We make sure to pass const0_rtx down if we came in with
8731 ignore set, to avoid doing the cleanups twice for something. */
8732 op0 = expand_expr (TREE_OPERAND (exp, 0),
8733 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8734 (modifier == EXPAND_INITIALIZER
8735 ? modifier : EXPAND_CONST_ADDRESS));
8737 /* If we are going to ignore the result, OP0 will have been set
8738 to const0_rtx, so just return it. Don't get confused and
8739 think we are taking the address of the constant. */
8743 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8744 clever and returns a REG when given a MEM. */
8745 op0 = protect_from_queue (op0, 1);
8747 /* We would like the object in memory. If it is a constant, we can
8748 have it be statically allocated into memory. For a non-constant,
8749 we need to allocate some memory and store the value into it. */
8751 if (CONSTANT_P (op0))
8752 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8754 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8755 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8756 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8758 /* If the operand is a SAVE_EXPR, we can deal with this by
8759 forcing the SAVE_EXPR into memory. */
8760 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8762 put_var_into_stack (TREE_OPERAND (exp, 0),
8764 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8768 /* If this object is in a register, it can't be BLKmode. */
8769 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8770 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8772 if (GET_CODE (op0) == PARALLEL)
8773 /* Handle calls that pass values in multiple
8774 non-contiguous locations. The Irix 6 ABI has examples
8776 emit_group_store (memloc, op0, inner_type,
8777 int_size_in_bytes (inner_type));
8779 emit_move_insn (memloc, op0);
8785 if (GET_CODE (op0) != MEM)
8788 mark_temp_addr_taken (op0);
8789 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8791 op0 = XEXP (op0, 0);
8792 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8793 op0 = convert_memory_address (ptr_mode, op0);
8797 /* If OP0 is not aligned as least as much as the type requires, we
8798 need to make a temporary, copy OP0 to it, and take the address of
8799 the temporary. We want to use the alignment of the type, not of
8800 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8801 the test for BLKmode means that can't happen. The test for
BLKmode is because we never make mis-aligned MEMs with
non-BLKmode.
8805 We don't need to do this at all if the machine doesn't have
8806 strict alignment. */
8807 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8808 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8810 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8812 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8815 if (TYPE_ALIGN_OK (inner_type))
8818 if (TREE_ADDRESSABLE (inner_type))
8820 /* We can't make a bitwise copy of this object, so fail. */
8821 error ("cannot take the address of an unaligned member");
8825 new = assign_stack_temp_for_type
8826 (TYPE_MODE (inner_type),
8827 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8828 : int_size_in_bytes (inner_type),
8829 1, build_qualified_type (inner_type,
8830 (TYPE_QUALS (inner_type)
8831 | TYPE_QUAL_CONST)));
8833 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8834 (modifier == EXPAND_STACK_PARM
8835 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8840 op0 = force_operand (XEXP (op0, 0), target);
8844 && GET_CODE (op0) != REG
8845 && modifier != EXPAND_CONST_ADDRESS
8846 && modifier != EXPAND_INITIALIZER
8847 && modifier != EXPAND_SUM)
8848 op0 = force_reg (Pmode, op0);
8850 if (GET_CODE (op0) == REG
8851 && ! REG_USERVAR_P (op0))
8852 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8854 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8855 op0 = convert_memory_address (ptr_mode, op0);
8859 case ENTRY_VALUE_EXPR:
8862 /* COMPLEX type for Extended Pascal & Fortran */
8865 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8868 /* Get the rtx code of the operands. */
8869 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8870 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8873 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8877 /* Move the real (op0) and imaginary (op1) parts to their location. */
8878 emit_move_insn (gen_realpart (mode, target), op0);
8879 emit_move_insn (gen_imagpart (mode, target), op1);
8881 insns = get_insns ();
8884 /* Complex construction should appear as a single unit. */
8885 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8886 each with a separate pseudo as destination.
8887 It's not correct for flow to treat them as a unit. */
8888 if (GET_CODE (target) != CONCAT)
8889 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8897 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8898 return gen_realpart (mode, op0);
8901 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8902 return gen_imagpart (mode, op0);
8906 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8910 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8913 target = gen_reg_rtx (mode);
8917 /* Store the realpart and the negated imagpart to target. */
8918 emit_move_insn (gen_realpart (partmode, target),
8919 gen_realpart (partmode, op0));
8921 imag_t = gen_imagpart (partmode, target);
8922 temp = expand_unop (partmode,
8923 ! unsignedp && flag_trapv
8924 && (GET_MODE_CLASS(partmode) == MODE_INT)
8925 ? negv_optab : neg_optab,
8926 gen_imagpart (partmode, op0), imag_t, 0);
8928 emit_move_insn (imag_t, temp);
8930 insns = get_insns ();
8933 /* Conjugate should appear as a single unit
8934 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8935 each with a separate pseudo as destination.
8936 It's not correct for flow to treat them as a unit. */
8937 if (GET_CODE (target) != CONCAT)
8938 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8945 case TRY_CATCH_EXPR:
8947 tree handler = TREE_OPERAND (exp, 1);
8949 expand_eh_region_start ();
8951 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8953 expand_eh_region_end_cleanup (handler);
8958 case TRY_FINALLY_EXPR:
8960 tree try_block = TREE_OPERAND (exp, 0);
8961 tree finally_block = TREE_OPERAND (exp, 1);
8963 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8965 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8966 is not sufficient, so we cannot expand the block twice.
8967 So we play games with GOTO_SUBROUTINE_EXPR to let us
8968 expand the thing only once. */
8969 /* When not optimizing, we go ahead with this form since
8970 (1) user breakpoints operate more predictably without
8971 code duplication, and
8972 (2) we're not running any of the global optimizers
8973 that would explode in time/space with the highly
8974 connected CFG created by the indirect branching. */
8976 rtx finally_label = gen_label_rtx ();
8977 rtx done_label = gen_label_rtx ();
8978 rtx return_link = gen_reg_rtx (Pmode);
8979 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8980 (tree) finally_label, (tree) return_link);
8981 TREE_SIDE_EFFECTS (cleanup) = 1;
8983 /* Start a new binding layer that will keep track of all cleanup
8984 actions to be performed. */
8985 expand_start_bindings (2);
8986 target_temp_slot_level = temp_slot_level;
8988 expand_decl_cleanup (NULL_TREE, cleanup);
8989 op0 = expand_expr (try_block, target, tmode, modifier);
8991 preserve_temp_slots (op0);
8992 expand_end_bindings (NULL_TREE, 0, 0);
8993 emit_jump (done_label);
8994 emit_label (finally_label);
8995 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8996 emit_indirect_jump (return_link);
8997 emit_label (done_label);
9001 expand_start_bindings (2);
9002 target_temp_slot_level = temp_slot_level;
9004 expand_decl_cleanup (NULL_TREE, finally_block);
9005 op0 = expand_expr (try_block, target, tmode, modifier);
9007 preserve_temp_slots (op0);
9008 expand_end_bindings (NULL_TREE, 0, 0);
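/* Control-flow sketch of the non-optimizing expansion above: the
   finally block is emitted once and entered like a subroutine.

       <try block>
       return_link = &&resume;  goto finally;  resume:
       goto done;
     finally:
       <finally block>
       goto *return_link;        (emit_indirect_jump)
     done:

   Every other exit from the binding contour runs the same
   GOTO_SUBROUTINE_EXPR cleanup with its own resume label.  When
   optimizing, the finally block is instead attached as an ordinary
   cleanup and may be duplicated on each exit path.  */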
9014 case GOTO_SUBROUTINE_EXPR:
9016 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9017 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9018 rtx return_address = gen_label_rtx ();
9019 emit_move_insn (return_link,
9020 gen_rtx_LABEL_REF (Pmode, return_address));
9022 emit_label (return_address);
9027 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9030 return get_exception_pointer (cfun);
9033 /* Function descriptors are not valid except for as
9034 initialization constants, and should not be expanded. */
9038 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9042 /* Here to do an ordinary binary operator, generating an instruction
9043 from the optab already placed in `this_optab'. */
9045 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9046 subtarget, &op0, &op1, 0);
9048 if (modifier == EXPAND_STACK_PARM)
9050 temp = expand_binop (mode, this_optab, op0, op1, target,
9051 unsignedp, OPTAB_LIB_WIDEN);
9057 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9058 when applied to the address of EXP produces an address known to be
9059 aligned more than BIGGEST_ALIGNMENT. */
9062 is_aligning_offset (tree offset, tree exp)
9064 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9065 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9066 || TREE_CODE (offset) == NOP_EXPR
9067 || TREE_CODE (offset) == CONVERT_EXPR
9068 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9069 offset = TREE_OPERAND (offset, 0);
9071 /* We must now have a BIT_AND_EXPR with a constant that is one less than
a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9073 if (TREE_CODE (offset) != BIT_AND_EXPR
9074 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9075 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
|| exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9079 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9080 It must be NEGATE_EXPR. Then strip any more conversions. */
9081 offset = TREE_OPERAND (offset, 0);
9082 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9083 || TREE_CODE (offset) == NOP_EXPR
9084 || TREE_CODE (offset) == CONVERT_EXPR)
9085 offset = TREE_OPERAND (offset, 0);
9087 if (TREE_CODE (offset) != NEGATE_EXPR)
9090 offset = TREE_OPERAND (offset, 0);
9091 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9092 || TREE_CODE (offset) == NOP_EXPR
9093 || TREE_CODE (offset) == CONVERT_EXPR)
9094 offset = TREE_OPERAND (offset, 0);
9096 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9097 whose type is the same as EXP. */
9098 return (TREE_CODE (offset) == ADDR_EXPR
9099 && (TREE_OPERAND (offset, 0) == exp
9100 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9101 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9102 == TREE_TYPE (exp)))));
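/* The pattern recognized corresponds to source like

     addr + ((- (intptr_t) addr) & (ALIGN - 1))

   where ALIGN is a power of 2 and ALIGN - 1 is larger than
   BIGGEST_ALIGNMENT: adding that offset rounds the address up to the
   next ALIGN boundary, so the result is known to be ALIGN-aligned.  */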
9105 /* Return the tree node if an ARG corresponds to a string constant or zero
9106 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9107 in bytes within the string that ARG is accessing. The type of the
9108 offset will be `sizetype'. */
9111 string_constant (tree arg, tree *ptr_offset)
9115 if (TREE_CODE (arg) == ADDR_EXPR
9116 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9118 *ptr_offset = size_zero_node;
9119 return TREE_OPERAND (arg, 0);
9121 else if (TREE_CODE (arg) == PLUS_EXPR)
9123 tree arg0 = TREE_OPERAND (arg, 0);
9124 tree arg1 = TREE_OPERAND (arg, 1);
9129 if (TREE_CODE (arg0) == ADDR_EXPR
9130 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9132 *ptr_offset = convert (sizetype, arg1);
9133 return TREE_OPERAND (arg0, 0);
9135 else if (TREE_CODE (arg1) == ADDR_EXPR
9136 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9138 *ptr_offset = convert (sizetype, arg0);
9139 return TREE_OPERAND (arg1, 0);
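/* For example, for the argument "abcdef" + 3 (the ADDR_EXPR of a
   STRING_CST plus a constant) this returns the STRING_CST and sets
   *PTR_OFFSET to 3, letting callers such as the string built-ins
   fold reads from the constant string.  */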
9146 /* Expand code for a post- or pre- increment or decrement
9147 and return the RTX for the result.
9148 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9151 expand_increment (tree exp, int post, int ignore)
9155 tree incremented = TREE_OPERAND (exp, 0);
9156 optab this_optab = add_optab;
9158 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9159 int op0_is_copy = 0;
9160 int single_insn = 0;
9161 /* 1 means we can't store into OP0 directly,
9162 because it is a subreg narrower than a word,
9163 and we don't dare clobber the rest of the word. */
9166 /* Stabilize any component ref that might need to be
9167 evaluated more than once below. */
9169 || TREE_CODE (incremented) == BIT_FIELD_REF
9170 || (TREE_CODE (incremented) == COMPONENT_REF
9171 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9172 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9173 incremented = stabilize_reference (incremented);
9174 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9175 ones into save exprs so that they don't accidentally get evaluated
9176 more than once by the code below. */
9177 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9178 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9179 incremented = save_expr (incremented);
9181 /* Compute the operands as RTX.
9182 Note whether OP0 is the actual lvalue or a copy of it:
9183 I believe it is a copy iff it is a register or subreg
9184 and insns were generated in computing it. */
9186 temp = get_last_insn ();
9187 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9189 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9190 in place but instead must do sign- or zero-extension during assignment,
9191 so we copy it into a new register and let the code below use it as
Note that we can safely modify this SUBREG since it is known not to be
9195 shared (it was made by the expand_expr call above). */
9197 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9200 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9204 else if (GET_CODE (op0) == SUBREG
9205 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9207 /* We cannot increment this SUBREG in place. If we are
9208 post-incrementing, get a copy of the old value. Otherwise,
9209 just mark that we cannot increment in place. */
9211 op0 = copy_to_reg (op0);
9216 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9217 && temp != get_last_insn ());
9218 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9220 /* Decide whether incrementing or decrementing. */
9221 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9222 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9223 this_optab = sub_optab;
9225 /* Convert decrement by a constant into a negative increment. */
9226 if (this_optab == sub_optab
9227 && GET_CODE (op1) == CONST_INT)
9229 op1 = GEN_INT (-INTVAL (op1));
9230 this_optab = add_optab;
9233 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9234 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9236 /* For a preincrement, see if we can do this with a single instruction. */
9239 icode = (int) this_optab->handlers[(int) mode].insn_code;
9240 if (icode != (int) CODE_FOR_nothing
9241 /* Make sure that OP0 is valid for operands 0 and 1
9242 of the insn we want to queue. */
9243 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9244 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9245 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9249 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9250 then we cannot just increment OP0. We must therefore contrive to
9251 increment the original value. Then, for postincrement, we can return
9252 OP0 since it is a copy of the old value. For preincrement, expand here
9253 unless we can do it with a single insn.
9255 Likewise if storing directly into OP0 would clobber high bits
9256 we need to preserve (bad_subreg). */
9257 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9259 /* This is the easiest way to increment the value wherever it is.
9260 Problems with multiple evaluation of INCREMENTED are prevented
9261 because either (1) it is a component_ref or preincrement,
9262 in which case it was stabilized above, or (2) it is an array_ref
9263 with constant index in an array in a register, which is
9264 safe to reevaluate. */
9265 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9266 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9267 ? MINUS_EXPR : PLUS_EXPR),
9270 TREE_OPERAND (exp, 1));
9272 while (TREE_CODE (incremented) == NOP_EXPR
9273 || TREE_CODE (incremented) == CONVERT_EXPR)
9275 newexp = convert (TREE_TYPE (incremented), newexp);
9276 incremented = TREE_OPERAND (incremented, 0);
9279 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9280 return post ? op0 : temp;
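/* For example, when "i++" finds I in a promoted SUBREG, OP0 is only
   a copy; the increment is rebuilt at the tree level as the
   assignment "i = i + 1" and expanded by expand_assignment, while the
   copy in OP0 (still holding the old value) is what a post-increment
   returns.  */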
9285 /* We have a true reference to the value in OP0.
9286 If there is an insn to add or subtract in this mode, queue it.
9287 Queuing the increment insn avoids the register shuffling
9288 that often results if we must increment now and first save
9289 the old value for subsequent use. */
9291 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9292 op0 = stabilize (op0);
9295 icode = (int) this_optab->handlers[(int) mode].insn_code;
9296 if (icode != (int) CODE_FOR_nothing
9297 /* Make sure that OP0 is valid for operands 0 and 1
9298 of the insn we want to queue. */
9299 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9300 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9302 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9303 op1 = force_reg (mode, op1);
9305 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9307 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9309 rtx addr = (general_operand (XEXP (op0, 0), mode)
9310 ? force_reg (Pmode, XEXP (op0, 0))
9311 : copy_to_reg (XEXP (op0, 0)));
9314 op0 = replace_equiv_address (op0, addr);
9315 temp = force_reg (GET_MODE (op0), op0);
9316 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9317 op1 = force_reg (mode, op1);
9319 /* The increment queue is LIFO, thus we have to `queue'
9320 the instructions in reverse order. */
9321 enqueue_insn (op0, gen_move_insn (op0, temp));
9322 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9327 /* Preincrement, or we can't increment with one simple insn. */
9329 /* Save a copy of the value before inc or dec, to return it later. */
9330 temp = value = copy_to_reg (op0);
9332 /* Arrange to return the incremented value. */
9333 /* Copy the rtx because expand_binop will protect from the queue,
9334 and the results of that would be invalid for us to return
9335 if our caller does emit_queue before using our result. */
9336 temp = copy_rtx (value = op0);
9338 /* Increment however we can. */
9339 op1 = expand_binop (mode, this_optab, value, op1, op0,
9340 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9342 /* Make sure the value is stored into OP0. */
9344 emit_move_insn (op0, op1);
9349 /* Generate code to calculate EXP using a store-flag instruction
9350 and return an rtx for the result. EXP is either a comparison
9351 or a TRUTH_NOT_EXPR whose operand is a comparison.
9353 If TARGET is nonzero, store the result there if convenient.
9355 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9358 Return zero if there is no suitable set-flag instruction
9359 available on this machine.
9361 Once expand_expr has been called on the arguments of the comparison,
9362 we are committed to doing the store flag, since it is not safe to
9363 re-evaluate the expression. We emit the store-flag insn by calling
9364 emit_store_flag, but only expand the arguments if we have a reason
9365 to believe that emit_store_flag will be successful. If we think that
9366 it will, but it isn't, we have to simulate the store-flag with a
9367 set/jump/set sequence. */
9370 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9373 tree arg0, arg1, type;
9375 enum machine_mode operand_mode;
9379 enum insn_code icode;
9380 rtx subtarget = target;
9383 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9384 result at the end. We can't simply invert the test since it would
9385 have already been inverted if it were valid. This case occurs for
9386 some floating-point comparisons. */
9388 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9389 invert = 1, exp = TREE_OPERAND (exp, 0);
9391 arg0 = TREE_OPERAND (exp, 0);
9392 arg1 = TREE_OPERAND (exp, 1);
9394 /* Don't crash if the comparison was erroneous. */
9395 if (arg0 == error_mark_node || arg1 == error_mark_node)
9398 type = TREE_TYPE (arg0);
9399 operand_mode = TYPE_MODE (type);
9400 unsignedp = TREE_UNSIGNED (type);
9402 /* We won't bother with BLKmode store-flag operations because it would mean
9403 passing a lot of information to emit_store_flag. */
9404 if (operand_mode == BLKmode)
9407 /* We won't bother with store-flag operations involving function pointers
9408 when function pointers must be canonicalized before comparisons. */
9409 #ifdef HAVE_canonicalize_funcptr_for_compare
9410 if (HAVE_canonicalize_funcptr_for_compare
9411 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9412 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9414 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9415 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9416 == FUNCTION_TYPE))))
9423 /* Get the rtx comparison code to use. We know that EXP is a comparison
9424 operation of some type. Some comparisons against 1 and -1 can be
9425 converted to comparisons with zero. Do so here so that the tests
9426 below will be aware that we have a comparison with zero. These
9427 tests will not catch constants in the first operand, but constants
9428 are rarely passed as the first operand. */
9430 switch (TREE_CODE (exp))
9439 if (integer_onep (arg1))
9440 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9442 code = unsignedp ? LTU : LT;
9445 if (! unsignedp && integer_all_onesp (arg1))
9446 arg1 = integer_zero_node, code = LT;
9448 code = unsignedp ? LEU : LE;
9451 if (! unsignedp && integer_all_onesp (arg1))
9452 arg1 = integer_zero_node, code = GE;
9454 code = unsignedp ? GTU : GT;
9457 if (integer_onep (arg1))
9458 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9460 code = unsignedp ? GEU : GE;
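/* So "x < 1" is rewritten as "x <= 0" and "x >= 1" as "x > 0";
   for signed operands, "x <= -1" becomes "x < 0" and "x > -1"
   becomes "x >= 0".  Comparisons against zero are what the
   store-flag patterns handle most cheaply.  */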
9463 case UNORDERED_EXPR:
9489 /* Put a constant second. */
9490 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9492 tem = arg0; arg0 = arg1; arg1 = tem;
9493 code = swap_condition (code);
9496 /* If this is an equality or inequality test of a single bit, we can
9497 do this by shifting the bit being tested to the low-order bit and
9498 masking the result with the constant 1. If the condition was EQ,
9499 we xor it with 1. This does not require an scc insn and is faster
9500 than an scc insn even if we have it.
9502 The code to make this transformation was moved into fold_single_bit_test,
9503 so we just call into the folder and expand its result. */
9505 if ((code == NE || code == EQ)
9506 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9507 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9509 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9510 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9512 target, VOIDmode, EXPAND_NORMAL);
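/* For example, fold_single_bit_test turns "(x & 8) != 0" into
   "(x >> 3) & 1", and the EQ form into the same value XORed with 1,
   so no scc instruction is required.  */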
9515 /* Now see if we are likely to be able to do this. Return if not. */
9516 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9519 icode = setcc_gen_code[(int) code];
9520 if (icode == CODE_FOR_nothing
9521 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9523 /* We can only do this if it is one of the special cases that
9524 can be handled without an scc insn. */
9525 if ((code == LT && integer_zerop (arg1))
9526 || (! only_cheap && code == GE && integer_zerop (arg1)))
9528 else if (BRANCH_COST >= 0
9529 && ! only_cheap && (code == NE || code == EQ)
9530 && TREE_CODE (type) != REAL_TYPE
9531 && ((abs_optab->handlers[(int) operand_mode].insn_code
9532 != CODE_FOR_nothing)
9533 || (ffs_optab->handlers[(int) operand_mode].insn_code
9534 != CODE_FOR_nothing)))
9540 if (! get_subtarget (target)
9541 || GET_MODE (subtarget) != operand_mode)
9544 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9547 target = gen_reg_rtx (mode);
9549 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9550 because, if the emit_store_flag does anything it will succeed and
9551 OP0 and OP1 will not be used subsequently. */
9553 result = emit_store_flag (target, code,
9554 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9555 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9556 operand_mode, unsignedp, 1);
9561 result = expand_binop (mode, xor_optab, result, const1_rtx,
9562 result, 0, OPTAB_LIB_WIDEN);
9566 /* If this failed, we have to do this with set/compare/jump/set code. */
9567 if (GET_CODE (target) != REG
9568 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9569 target = gen_reg_rtx (GET_MODE (target));
9571 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9572 result = compare_from_rtx (op0, op1, code, unsignedp,
9573 operand_mode, NULL_RTX);
9574 if (GET_CODE (result) == CONST_INT)
9575 return (((result == const0_rtx && ! invert)
9576 || (result != const0_rtx && invert))
9577 ? const0_rtx : const1_rtx);
9579 /* The code of RESULT may not match CODE if compare_from_rtx
9580 decided to swap its operands and reverse the original code.
9582 We know that compare_from_rtx returns either a CONST_INT or
9583 a new comparison code, so it is safe to just extract the
9584 code from RESULT. */
9585 code = GET_CODE (result);
9587 label = gen_label_rtx ();
9588 if (bcc_gen_fctn[(int) code] == 0)
9591 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9592 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
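/* The fallback emitted here is, in outline (with INVERT clear):

     target = 1;
     if (op0 <cond> op1) goto label;
     target = 0;
   label:
*/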
9599 /* Stubs in case we haven't got a casesi insn. */
9601 # define HAVE_casesi 0
9602 # define gen_casesi(a, b, c, d, e) (0)
9603 # define CODE_FOR_casesi CODE_FOR_nothing
9606 /* If the machine does not have a case insn that compares the bounds,
9607 this means extra overhead for dispatch tables, which raises the
9608 threshold for using them. */
9609 #ifndef CASE_VALUES_THRESHOLD
9610 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9611 #endif /* CASE_VALUES_THRESHOLD */
9614 case_values_threshold (void)
9616 return CASE_VALUES_THRESHOLD;
9619 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9620 0 otherwise (i.e. if there is no casesi instruction). */
9622 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9623 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9625 enum machine_mode index_mode = SImode;
9626 int index_bits = GET_MODE_BITSIZE (index_mode);
9627 rtx op1, op2, index;
9628 enum machine_mode op_mode;
9633 /* Convert the index to SImode. */
9634 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9636 enum machine_mode omode = TYPE_MODE (index_type);
9637 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9639 /* We must handle the endpoints in the original mode. */
9640 index_expr = build (MINUS_EXPR, index_type,
9641 index_expr, minval);
9642 minval = integer_zero_node;
9643 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9644 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9645 omode, 1, default_label);
9646 /* Now we can safely truncate. */
9647 index = convert_to_mode (index_mode, index, 0);
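/* That is, for an index wider than SImode the bounds check
   "(unsigned) (index - minval) > range" is done first in the wide
   mode; once the index is known to be in range it fits in SImode,
   so the truncation cannot lose significant bits.  */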
9651 if (TYPE_MODE (index_type) != index_mode)
9653 index_expr = convert ((*lang_hooks.types.type_for_size)
9654 (index_bits, 0), index_expr);
9655 index_type = TREE_TYPE (index_expr);
9658 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9661 index = protect_from_queue (index, 0);
9662 do_pending_stack_adjust ();
9664 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9665 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9667 index = copy_to_mode_reg (op_mode, index);
9669 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9671 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9672 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9673 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9674 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9676 op1 = copy_to_mode_reg (op_mode, op1);
9678 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9680 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9681 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9682 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9683 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9685 op2 = copy_to_mode_reg (op_mode, op2);
9687 emit_jump_insn (gen_casesi (index, op1, op2,
9688 table_label, default_label));
9692 /* Attempt to generate a tablejump instruction; same concept. */
9693 #ifndef HAVE_tablejump
9694 #define HAVE_tablejump 0
9695 #define gen_tablejump(x, y) (0)
9698 /* Subroutine of the next function.
9700 INDEX is the value being switched on, with the lowest value
9701 in the table already subtracted.
9702 MODE is its expected mode (needed if INDEX is constant).
9703 RANGE is the length of the jump table.
9704 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9706 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9707 index value is out of range. */
9710 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9715 if (INTVAL (range) > cfun->max_jumptable_ents)
9716 cfun->max_jumptable_ents = INTVAL (range);
9718 /* Do an unsigned comparison (in the proper mode) between the index
9719 expression and the value which represents the length of the range.
9720 Since we just finished subtracting the lower bound of the range
9721 from the index expression, this comparison allows us to simultaneously
9722 check that the original index expression value is both greater than
9723 or equal to the minimum value of the range and less than or equal to
9724 the maximum value of the range. */
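/* Concretely, INDEX here is already orig_index - low, so the single
   unsigned test

     if ((unsigned) index > (unsigned) (high - low))
       goto default_label;

   rejects orig_index < low (the subtraction wraps to a huge unsigned
   value) as well as orig_index > high: one comparison replaces two.  */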
9726 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9729 /* If index is in range, it must fit in Pmode.
9730 Convert to Pmode so we can index with it. */
9732 index = convert_to_mode (Pmode, index, 1);
9734 /* Don't let a MEM slip through, because then INDEX that comes
9735 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9736 and break_out_memory_refs will go to work on it and mess it up. */
9737 #ifdef PIC_CASE_VECTOR_ADDRESS
9738 if (flag_pic && GET_CODE (index) != REG)
9739 index = copy_to_mode_reg (Pmode, index);
9742 /* If flag_force_addr were to affect this address
9743 it could interfere with the tricky assumptions made
9744 about addresses that contain label-refs,
9745 which may be valid only very near the tablejump itself. */
9746 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9747 GET_MODE_SIZE, because this indicates how large insns are. The other
9748 uses should all be Pmode, because they are addresses. This code
9749 could fail if addresses and insns are not the same size. */
9750 index = gen_rtx_PLUS (Pmode,
9751 gen_rtx_MULT (Pmode, index,
9752 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9753 gen_rtx_LABEL_REF (Pmode, table_label));
9754 #ifdef PIC_CASE_VECTOR_ADDRESS
9756 index = PIC_CASE_VECTOR_ADDRESS (index);
9759 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9760 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9761 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9762 RTX_UNCHANGING_P (vector) = 1;
9763 MEM_NOTRAP_P (vector) = 1;
9764 convert_move (temp, vector, 0);
9766 emit_jump_insn (gen_tablejump (temp, table_label));
9768 /* If we are generating PIC code or if the table is PC-relative, the
9769 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9770 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9775 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9776 rtx table_label, rtx default_label)
9780 if (! HAVE_tablejump)
9783 index_expr = fold (build (MINUS_EXPR, index_type,
9784 convert (index_type, index_expr),
9785 convert (index_type, minval)));
9786 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9788 index = protect_from_queue (index, 0);
9789 do_pending_stack_adjust ();
9791 do_tablejump (index, TYPE_MODE (index_type),
9792 convert_modes (TYPE_MODE (index_type),
9793 TYPE_MODE (TREE_TYPE (range)),
9794 expand_expr (range, NULL_RTX,
9796 TREE_UNSIGNED (TREE_TYPE (range))),
9797 table_label, default_label);
9801 /* Nonzero if the mode is a valid vector mode for this architecture.
9802 This returns nonzero even if there is no hardware support for the
9803 vector mode, but we can emulate with narrower modes. */
9806 vector_mode_valid_p (enum machine_mode mode)
9808 enum mode_class class = GET_MODE_CLASS (mode);
9809 enum machine_mode innermode;
9811 /* Doh! What's going on? */
9812 if (class != MODE_VECTOR_INT
9813 && class != MODE_VECTOR_FLOAT)
9816 /* Hardware support. Woo hoo! */
9817 if (VECTOR_MODE_SUPPORTED_P (mode))
9820 innermode = GET_MODE_INNER (mode);
/* We should probably return 1 when V4DI is requested and we have no DI
but do have V2DI; however, that case is probably very unlikely.  */
9825 /* If we have support for the inner mode, we can safely emulate it.
We may not have V2DI, but we can emulate it with a pair of DIs.  */
9827 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9830 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9832 const_vector_from_tree (tree exp)
9837 enum machine_mode inner, mode;
9839 mode = TYPE_MODE (TREE_TYPE (exp));
9841 if (is_zeros_p (exp))
9842 return CONST0_RTX (mode);
9844 units = GET_MODE_NUNITS (mode);
9845 inner = GET_MODE_INNER (mode);
9847 v = rtvec_alloc (units);
9849 link = TREE_VECTOR_CST_ELTS (exp);
9850 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9852 elt = TREE_VALUE (link);
9854 if (TREE_CODE (elt) == REAL_CST)
9855 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9858 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9859 TREE_INT_CST_HIGH (elt),
9863 /* Initialize remaining elements to 0. */
9864 for (; i < units; ++i)
9865 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9867 return gen_rtx_raw_CONST_VECTOR (mode, v);
9870 #include "gt-expr.h"