/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
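
/* Example (an illustration, not from the original sources): inside an
   #if, `defined (X)' evaluates to 1 or 0, so the test above reads
   "exactly one of STACK_GROWS_DOWNWARD and ARGS_GROW_DOWNWARD is
   defined".  On a hypothetical target where the stack grows downward
   but argument addresses grow upward, the last argument must be
   pushed first, hence PUSH_ARGS_REVERSED.  */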
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */
struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode);
static rtx enqueue_insn (rtx, rtx);
static unsigned HOST_WIDE_INT move_by_pieces_ninsns (unsigned HOST_WIDE_INT,
                                                     unsigned int);
static void move_by_pieces_1 (rtx (*) (rtx, ...), enum machine_mode,
                              struct move_by_pieces *);
static bool block_move_libcall_safe_for_call_parm (void);
static bool emit_block_move_via_movstr (rtx, rtx, rtx, unsigned);
static rtx emit_block_move_via_libcall (rtx, rtx, rtx);
static tree emit_block_move_libcall_fn (int);
static void emit_block_move_via_loop (rtx, rtx, rtx, unsigned);
static rtx clear_by_pieces_1 (void *, HOST_WIDE_INT, enum machine_mode);
static void clear_by_pieces (rtx, unsigned HOST_WIDE_INT, unsigned int);
static void store_by_pieces_1 (struct store_by_pieces *, unsigned int);
static void store_by_pieces_2 (rtx (*) (rtx, ...), enum machine_mode,
                               struct store_by_pieces *);
static bool clear_storage_via_clrstr (rtx, rtx, unsigned);
static rtx clear_storage_via_libcall (rtx, rtx);
static tree clear_storage_libcall_fn (int);
static rtx compress_float_constant (rtx, rtx);
static rtx get_subtarget (rtx);
static int is_zeros_p (tree);
static void store_constructor_field (rtx, unsigned HOST_WIDE_INT,
                                     HOST_WIDE_INT, enum machine_mode,
                                     tree, tree, int, int);
static void store_constructor (tree, rtx, int, HOST_WIDE_INT);
static rtx store_field (rtx, HOST_WIDE_INT, HOST_WIDE_INT, enum machine_mode,
                        tree, enum machine_mode, int, tree, int);
static rtx var_rtx (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor (tree);
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type (tree, tree);
static int is_aligning_offset (tree, tree);
static rtx expand_increment (tree, int, int);
static void expand_operands (tree, tree, rtx, rtx*, rtx*,
                             enum expand_modifier);
static rtx do_store_flag (tree, rtx, enum machine_mode, int);
static void emit_single_push_insn (enum machine_mode, rtx, tree);
static void do_tablejump (rtx, enum machine_mode, rtx, rtx, rtx);
static rtx const_vector_from_tree (tree);
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
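
/* For example, with the default MOVE_RATIO of 15 (not optimizing for
   size) and a 4-byte word_mode, an aligned 32-byte copy costs 8 word
   moves, so MOVE_BY_PIECES_P is true and the copy is expanded inline;
   a 128-byte copy (32 moves) would use a movstr pattern or a libcall
   instead.  */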
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* These arrays record the insn_code of two different kinds of insns
   to perform block compares.  */
enum insn_code cmpstr_optab[NUM_MACHINE_MODES];
enum insn_code cmpmem_optab[NUM_MACHINE_MODES];

/* Stack of EXPR_WITH_FILE_LOCATION nested expressions.  */
struct file_stack *expr_wfl_stack;

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once (void)
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;
  unsigned int regno;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
               && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
           srcmode = GET_MODE_WIDER_MODE (srcmode))
        {
          enum insn_code ic;

          ic = can_extend_p (mode, srcmode, 0);
          if (ic == CODE_FOR_nothing)
            continue;

          PUT_MODE (mem, srcmode);

          if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
            float_extend_from_mem[mode][srcmode] = true;
        }
    }
}
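
/* For illustration: on a hypothetical target whose extendsfdf2
   pattern accepts an SFmode memory operand, the loop above records
   float_extend_from_mem[DFmode][SFmode] = true, while direct_load and
   direct_store say, for each mode, whether a plain (set (reg) (mem))
   or (set (mem) (reg)) is recognized by some hard register.  */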
/* This is run at the start of compiling a function.  */

void
init_expr (void)
{
  cfun->expr = ggc_alloc_cleared (sizeof (struct expr_status));
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function (void)
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (rtx var, rtx body)
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}
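
/* For illustration, queuing an SImode increment of V produces
   something like

     (queued:SI (reg:SI v)		;; QUEUED_VAR
                nil nil			;; QUEUED_INSN, QUEUED_COPY
                (set (reg:SI v) (plus:SI (reg:SI v) (const_int 1)))
                <previous pending_chain>)

   and uses of the QUEUED stand for the pre-increment value until
   emit_queue flushes the chain.  */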
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (rtx x, int modify)
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          rtx y = XEXP (x, 0);
          rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

          if (QUEUED_INSN (y))
            {
              rtx temp = gen_reg_rtx (GET_MODE (x));

              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }

          /* Copy the address into a pseudo, so that the returned value
             remains correct across calls to emit_queue.  */
          return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
        }

      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
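
/* Typical use, sketched: to expand a post-increment "v++" whose old
   value is still needed, the expander does roughly

     q = enqueue_insn (v, <increment pattern for v>);
     ... protect_from_queue (q, 0) wherever the old value is used ...
     emit_queue ();

   so uses see the pre-increment value and the increment itself is
   emitted when the queue is flushed.  */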
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue (void)
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))

          QUEUED_INSN (p) = body;

#ifdef ENABLE_CHECKING

          QUEUED_INSN (p) = emit_insn (body);

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (rtx to, rtx from, int unsignedp)
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
                              : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
        to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (GET_CODE (to) == CONCAT && GET_CODE (from) == CONCAT)
    {
      convert_move (XEXP (to, 0), XEXP (from, 0), unsignedp);
      convert_move (XEXP (to, 1), XEXP (from, 1), unsignedp);
      return;
    }

  if (to_real)
    {
      rtx value, insns;
      convert_optab tab;

      if (GET_MODE_PRECISION (from_mode) < GET_MODE_PRECISION (to_mode))
        tab = sext_optab;
      else if (GET_MODE_PRECISION (from_mode) > GET_MODE_PRECISION (to_mode))
        tab = trunc_optab;
      else
        abort ();

      /* Try converting directly if the insn is supported.  */

      code = tab->handlers[to_mode][from_mode].insn_code;
      if (code != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from,
                          tab == sext_optab ? FLOAT_EXTEND : FLOAT_TRUNCATE);
          return;
        }

      /* Otherwise use a libcall.  */
      libcall = tab->handlers[to_mode][from_mode].libfunc;

      if (!libcall)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value,
                          tab == trunc_optab ? gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                       from)
                          : gen_rtx_FLOAT_EXTEND (to_mode, from));
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  /* Targets are expected to provide conversion insns between PxImode and
     xImode for all MODE_PARTIAL_INT modes they use, but no others.  */
  if (GET_MODE_CLASS (to_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (to_mode), MODE_INT);

      if (trunc_optab->handlers[to_mode][full_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      if (full_mode != from_mode)
        from = convert_to_mode (full_mode, from, unsignedp);
      emit_unop_insn (trunc_optab->handlers[to_mode][full_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }
  if (GET_MODE_CLASS (from_mode) == MODE_PARTIAL_INT)
    {
      enum machine_mode full_mode
        = smallest_mode_for_size (GET_MODE_BITSIZE (from_mode), MODE_INT);

      if (sext_optab->handlers[full_mode][from_mode].insn_code
          == CODE_FOR_nothing)
        abort ();

      emit_unop_insn (sext_optab->handlers[full_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      if (to_mode == full_mode)
        return;

      /* else proceed to integer conversions below.  */
      from_mode = full_mode;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          if (flag_force_mem)
            from = force_not_mem (from);

          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */
  if (trunc_optab->handlers[to_mode][from_mode].insn_code != CODE_FOR_nothing)
    {
      emit_unop_insn (trunc_optab->handlers[to_mode][from_mode].insn_code,
                      to, from, UNKNOWN);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.

     ??? Code above formerly short-circuited this, for most integer
     mode pairs, with a force_reg in from_mode followed by a recursive
     call to this routine.  Appears always to have been wrong.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
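
/* Worked example: converting a signed SImode register to DImode on a
   32-bit target with no extendsidi2 pattern takes the multiword path
   above: the low word is copied, fill_value becomes lowfrom >> 31
   (an arithmetic shift, so all zeros or all ones), and the remaining
   word is filled with it inside a no-conflict block whose equivalent
   is (sign_extend:DI (reg:SI ...)).  */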
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (enum machine_mode mode, rtx x, int unsignedp)
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (enum machine_mode mode, enum machine_mode oldmode, rtx x, int unsignedp)
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && (! HARD_REGISTER_P (x)
                              || HARD_REGNO_MODE_OK (REGNO (x), mode))
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return gen_int_mode (val, mode);
        }

      return gen_lowpart (mode, x);
    }

  /* Converting an integer constant into a vector mode is always equivalent
     to a subreg operation.  */
  if (VECTOR_MODE_P (mode) && GET_MODE (x) == VOIDmode)
    {
      if (GET_MODE_BITSIZE (mode) != GET_MODE_BITSIZE (oldmode))
        abort ();
      return simplify_gen_subreg (mode, x, oldmode, 0);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
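
/* Worked example: convert_modes (DImode, SImode, GEN_INT (-1), 1) on
   a host with 32-bit HOST_WIDE_INT wants the unsigned value
   0xffffffff; the special CONST_INT case above builds the constant
   with an explicitly zero high word via immed_double_const rather
   than letting gen_lowpart produce a sign-extended all-ones high
   word.  */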
/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (unsigned HOST_WIDE_INT len,
                    unsigned int align ATTRIBUTE_UNUSED)
{
  return MOVE_BY_PIECES_P (len, align);
}

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (rtx to, rtx from, unsigned HOST_WIDE_INT len,
                unsigned int align, int endp)
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
        = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
           || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
        = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
        abort ();
      if (data.autinc_to)
        {
          if (endp == 2)
            {
              if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
                emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
              else
                data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
                                                                -1));
            }
          to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
                                           data.offset);
        }
      else
        {
          if (endp == 2)
            --data.offset;
          to1 = adjust_address (data.to, QImode, data.offset);
        }
      return to1;
    }

  return data.to;
}
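
/* Worked example: a 7-byte copy with 4-byte alignment on a target
   whose widest integer move is SImode is expanded by the loop above
   as one SImode move, one HImode move and one QImode move (3 insns),
   adjusting the addresses by 4, 2 and 1 in turn.  */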
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (unsigned HOST_WIDE_INT l, unsigned int align)
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
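
/* Worked example: l = 11 aligned bytes with 4-byte words gives
   11 / 4 = 2 SImode moves (3 left), then 3 / 2 = 1 HImode move
   (1 left), then 1 QImode move, so move_by_pieces_ninsns returns 4;
   with the default MOVE_RATIO of 15 such a copy is done by pieces.  */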
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
                  struct move_by_pieces *data)
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->to)
        {
          if (data->autinc_to)
            to1 = adjust_automodify_address (data->to, mode, data->to_addr,
                                             data->offset);
          else
            to1 = adjust_address (data->to, mode, data->offset);
        }

      if (data->autinc_from)
        from1 = adjust_automodify_address (data->from, mode, data->from_addr,
                                           data->offset);
      else
        from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr,
                                  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
        emit_insn ((*genfun) (to1, from1));
      else
        {
#ifdef PUSH_ROUNDING
          emit_single_push_insn (mode, from1, NULL);
#else
          abort ();
#endif
        }

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (rtx x, rtx y, rtx size, enum block_op_methods method)
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
         to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      if (INTVAL (size) == 0)
        return 0;

      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align, 0);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
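
/* Usage sketch: most callers pick the method from context, e.g.

     emit_block_move (target, source, GEN_INT (nbytes),
                      BLOCK_OP_NORMAL);

   while code emitted between argument pushes and the call insn uses
   BLOCK_OP_CALL_PARM so that a memcpy libcall cannot clobber
   already-pushed outgoing arguments.  */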
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm (void)
{
  /* If arguments are pushed on the stack, then they're safe.  */
  if (PUSH_ARGS)
    return true;

  /* If registers go on the stack anyway, any argument is sure to clobber
     an outgoing argument.  */
#if defined (REG_PARM_STACK_SPACE) && defined (OUTGOING_REG_PARM_STACK_SPACE)
  {
    tree fn = emit_block_move_libcall_fn (false);
    if (REG_PARM_STACK_SPACE (fn) != 0)
      return false;
  }
#endif

  /* If any argument goes in memory, then it might clobber an outgoing
     argument.  */
  {
    CUMULATIVE_ARGS args_so_far;
    tree fn, arg;

    fn = emit_block_move_libcall_fn (false);
    INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

    arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
    for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
      {
        enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
        rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
        if (!tmp || !REG_P (tmp))
          return false;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
        if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
                                        NULL_TREE, 1))
          return false;
#endif
        FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
      }
  }
  return true;
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (rtx x, rtx y, rtx size, unsigned int align)
{
  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
          /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
             here because if SIZE is less than the mode mask, as it is
             returned by the macro, it will definitely be less than the
             actual mode mask.  */
          && ((GET_CODE (size) == CONST_INT
               && ((unsigned HOST_WIDE_INT) INTVAL (size)
                   <= (GET_MODE_MASK (mode) >> 1)))
              || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
          && ((pred = insn_data[(int) code].operand[0].predicate) == 0
              || (*pred) (x, BLKmode))
          && ((pred = insn_data[(int) code].operand[1].predicate) == 0
              || (*pred) (y, BLKmode))
          && ((pred = insn_data[(int) code].operand[3].predicate) == 0
              || (*pred) (opalign, VOIDmode)))
        {
          rtx op2;
          rtx last = get_last_insn ();
          rtx pat;

          op2 = convert_to_mode (mode, size, 1);
          pred = insn_data[(int) code].operand[2].predicate;
          if (pred != 0 && ! (*pred) (op2, mode))
            op2 = copy_to_mode_reg (mode, op2);

          /* ??? When called via emit_block_move_for_call, it'd be
             nice if there were some way to inform the backend, so
             that it doesn't fail the expansion because it thinks
             emitting the libcall would be more efficient.  */

          pat = GEN_FCN ((int) code) (x, y, op2, opalign);
          if (pat)
            {
              emit_insn (pat);
              volatile_ok = 0;
              return true;
            }
          else
            delete_insns_since (last);
        }
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (rtx dst, rtx src, rtx size)
{
  rtx dst_addr, src_addr;
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue and reuse
     it later.  Consider what happens if emit_queue is called before the
     return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before we are
     finished emitting RTL for argument setup.  So if we are not careful we
     could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy the addresses of
     DST and SRC and SIZE into new pseudos.  We can then place those new
     pseudos into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they do not call
     emit_queue before loading their arguments.  However, we may need to have
     library calls call emit_queue in the future since failing to do so could
     cause problems for targets which define SMALL_REGISTER_CLASSES and pass
     arguments in registers.  */

  dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));

  dst_addr = convert_memory_address (ptr_mode, dst_addr);
  src_addr = convert_memory_address (ptr_mode, src_addr);

  dst_tree = make_tree (ptr_type_node, dst_addr);
  src_tree = make_tree (ptr_type_node, src_addr);

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);

  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                     call_expr, arg_list, NULL_TREE);

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call clobbered
     it.  Otherwise, a load from it may erroneously be hoisted from a loop, or
     the delay slot scheduler might overlook conflicts and take nasty
     decisions.  */
  if (RTX_UNCHANGING_P (dst))
    add_function_usage_to
      (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
                                             gen_rtx_CLOBBER (VOIDmode, dst),
                                             NULL_RTX));

  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
}
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

void
init_block_move_fn (const char *asmspec)
{
  if (!block_move_fn)
    {
      tree args, fn;

      if (TARGET_MEM_FUNCTIONS)
        {
          fn = get_identifier ("memcpy");
          args = build_function_type_list (ptr_type_node, ptr_type_node,
                                           const_ptr_type_node, sizetype,
                                           NULL_TREE);
        }
      else
        {
          fn = get_identifier ("bcopy");
          args = build_function_type_list (void_type_node, const_ptr_type_node,
                                           ptr_type_node, unsigned_type_node,
                                           NULL_TREE);
        }

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (asmspec)
    {
      SET_DECL_RTL (block_move_fn, NULL_RTX);
      SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
    }
}

static tree
emit_block_move_libcall_fn (int for_call)
{
  static bool emitted_extern;

  if (!block_move_fn)
    init_block_move_fn (NULL);

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (block_move_fn, NULL);
      assemble_external (block_move_fn);
    }

  return block_move_fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (rtx x, rtx y, rtx size,
                          unsigned int align ATTRIBUTE_UNUSED)
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
                             true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
                           true, top_label);

  emit_note (NOTE_INSN_LOOP_END);
}
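
/* The generated code has roughly this shape (in C terms):

     iter = 0;
     goto cmp;
   top:
     *(x_base + iter) = *(y_base + iter);	(QImode)
     iter += 1;
   cmp:
     if (iter < size) goto top;

   one byte per iteration, used only when libcalls are forbidden.  */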
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (int regno, rtx x, int nregs, enum machine_mode mode)
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (int regno, rtx x, int nregs)
{
  int i;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      rtx last = get_last_insn ();
      rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                    GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (rtx orig)
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i == 1)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
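
/* For illustration, a group for a value passed in two registers might
   look like

     (parallel [(expr_list (reg:DI 3) (const_int 0))
                (expr_list (reg:DI 4) (const_int 8))])

   where each const_int is the byte offset of that register's piece;
   gen_group_rtx replaces the hard regs with fresh pseudos of the same
   modes.  */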
/* Emit code to move a block ORIG_SRC of type TYPE to a block DST,
   where DST is non-consecutive registers represented by a PARALLEL.
   SSIZE represents the total size of block ORIG_SRC in bytes, or -1
   if not known.  */

void
emit_group_load (rtx dst, rtx orig_src, tree type ATTRIBUTE_UNUSED, int ssize)
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      unsigned int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
        {
          /* Arrange to shift the fragment to where it belongs.
             extract_bit_field loads to the lsb of the reg.  */
          if (
#ifdef BLOCK_REG_PADDING
              BLOCK_REG_PADDING (GET_MODE (orig_src), type, i == start)
              == (BYTES_BIG_ENDIAN ? upward : downward)
#else
              BYTES_BIG_ENDIAN
#endif
              )
            shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
        }

      /* If we won't be loading directly from memory, protect the real source
         from strange tricks we might play; but make sure that the source can
         be loaded directly into the destination.  */
      src = orig_src;
      if (GET_CODE (orig_src) != MEM
          && (!CONSTANT_P (orig_src)
              || (GET_MODE (orig_src) != mode
                  && GET_MODE (orig_src) != VOIDmode)))
        {
          if (GET_MODE (orig_src) == VOIDmode)
            src = gen_reg_rtx (mode);
          else
            src = gen_reg_rtx (GET_MODE (orig_src));

          emit_move_insn (src, orig_src);
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (src))
              || MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode))
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
        }
      else if (GET_CODE (src) == CONCAT)
        {
          unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
          unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));

          if ((bytepos == 0 && bytelen == slen0)
              || (bytepos != 0 && bytepos + bytelen <= slen))
            {
              /* The following assumes that the concatenated objects all
                 have the same size.  In this case, a simple calculation
                 can be used to determine the object and the bit field
                 to be extracted.  */
              tmps[i] = XEXP (src, bytepos / slen0);
              if (! CONSTANT_P (tmps[i])
                  && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
                tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
                                             (bytepos % slen0) * BITS_PER_UNIT,
                                             1, NULL_RTX, mode, mode, ssize);
            }
          else if (bytepos == 0)
            {
              rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
              emit_move_insn (mem, src);
              tmps[i] = adjust_address (mem, mode, 0);
            }
          else
            abort ();
        }
      /* FIXME: A SIMD parallel will eventually lead to a subreg of a
         SIMD register, which is currently broken.  While we get GCC
         to emit proper RTL for these cases, let's dump to memory.  */
      else if (VECTOR_MODE_P (GET_MODE (dst))
               && GET_CODE (src) == REG)
        {
          int slen = GET_MODE_SIZE (GET_MODE (src));
          rtx mem;

          mem = assign_stack_temp (GET_MODE (src), slen, 0);
          emit_move_insn (mem, src);
          tmps[i] = adjust_address (mem, mode, (int) bytepos);
        }
      else if (CONSTANT_P (src)
               || (GET_CODE (src) == REG && GET_MODE (src) == mode))
        tmps[i] = src;
      else
        tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                     bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                     mode, mode, ssize);

      if (shift)
        expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                      tmps[i], 0, OPTAB_WIDEN);
    }

  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
/* Emit code to move a block SRC to block DST, where SRC and DST are
   non-consecutive groups of registers, each represented by a PARALLEL.  */

void
emit_group_move (rtx dst, rtx src)
{
  int i;

  if (GET_CODE (src) != PARALLEL
      || GET_CODE (dst) != PARALLEL
      || XVECLEN (src, 0) != XVECLEN (dst, 0))
    abort ();

  /* Skip first entry if NULL.  */
  for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
                    XEXP (XVECEXP (src, 0, i), 0));
}
1990 /* Emit code to move a block SRC to a block ORIG_DST of type TYPE,
1991 where SRC is non-consecutive registers represented by a PARALLEL.
1992 SSIZE represents the total size of block ORIG_DST, or -1 if not
1996 emit_group_store (rtx orig_dst, rtx src, tree type ATTRIBUTE_UNUSED, int ssize)
2001 if (GET_CODE (src) != PARALLEL)
2004 /* Check for a NULL entry, used to indicate that the parameter goes
2005 both on the stack and in registers. */
2006 if (XEXP (XVECEXP (src, 0, 0), 0))
2011 tmps = alloca (sizeof (rtx) * XVECLEN (src, 0));
2013 /* Copy the (probable) hard regs into pseudos. */
2014 for (i = start; i < XVECLEN (src, 0); i++)
2016 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2017 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2018 emit_move_insn (tmps[i], reg);
2022 /* If we won't be storing directly into memory, protect the real destination
2023 from strange tricks we might play. */
2025 if (GET_CODE (dst) == PARALLEL)
2029 /* We can get a PARALLEL dst if there is a conditional expression in
2030 a return statement. In that case, the dst and src are the same,
2031 so no action is necessary. */
2032 if (rtx_equal_p (dst, src))
2035 /* It is unclear if we can ever reach here, but we may as well handle
2036 it. Allocate a temporary, and split this into a store/load to/from
2039 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2040 emit_group_store (temp, src, type, ssize);
2041 emit_group_load (dst, temp, type, ssize);
2044 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2046 dst = gen_reg_rtx (GET_MODE (orig_dst));
2047 /* Make life a bit easier for combine. */
2048 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2051 /* Process the pieces. */
2052 for (i = start; i < XVECLEN (src, 0); i++)
2054 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2055 enum machine_mode mode = GET_MODE (tmps[i]);
2056 unsigned int bytelen = GET_MODE_SIZE (mode);
2059 /* Handle trailing fragments that run over the size of the struct. */
2060 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2062 /* store_bit_field always takes its value from the lsb.
2063 Move the fragment to the lsb if it's not already there. */
2065 #ifdef BLOCK_REG_PADDING
2066 BLOCK_REG_PADDING (GET_MODE (orig_dst), type, i == start)
2067 == (BYTES_BIG_ENDIAN ? upward : downward)
2073 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2074 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2075 tmps[i], 0, OPTAB_WIDEN);
2077 bytelen = ssize - bytepos;
2080 if (GET_CODE (dst) == CONCAT)
2082 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2083 dest = XEXP (dst, 0);
2084 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2086 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2087 dest = XEXP (dst, 1);
2089 else if (bytepos == 0 && XVECLEN (src, 0))
2091 dest = assign_stack_temp (GET_MODE (dest),
2092 GET_MODE_SIZE (GET_MODE (dest)), 0);
2093 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2102 /* Optimize the access just a bit. */
2103 if (GET_CODE (dest) == MEM
2104 && (! SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (dest))
2105 || MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode))
2106 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2107 && bytelen == GET_MODE_SIZE (mode))
2108 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2110 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2111 mode, tmps[i], ssize);
2116 /* Copy from the pseudo into the (probable) hard reg. */
2117 if (orig_dst != dst)
2118 emit_move_insn (orig_dst, dst);
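/* Illustrative sketch, not in the original source: a caller could
   describe a value split across two hypothetical hard registers 3 and 4
   at byte offsets 0 and 4 with a PARALLEL, then scatter it into memory
   with the routine above:

     rtx par = gen_rtx_PARALLEL
       (VOIDmode,
        gen_rtvec (2,
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 3),
                                      GEN_INT (0)),
                   gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 4),
                                      GEN_INT (4))));
     emit_group_store (mem, par, type, 8);

   MEM and TYPE stand for the BLKmode destination and its tree type;
   SImode and the register numbers are assumptions for the example.  */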
2121 /* Generate code to copy a BLKmode object of TYPE out of a
2122 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2123 is null, a stack temporary is created. TGTBLK is returned.
2125 The purpose of this routine is to handle functions that return
2126 BLKmode structures in registers. Some machines (the PA for example)
2127 want to return all small structures in registers regardless of the
2128 structure's alignment. */
2131 copy_blkmode_from_reg (rtx tgtblk, rtx srcreg, tree type)
2133 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2134 rtx src = NULL, dst = NULL;
2135 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2136 unsigned HOST_WIDE_INT bitpos, xbitpos, padding_correction = 0;
2140 tgtblk = assign_temp (build_qualified_type (type,
2142 | TYPE_QUAL_CONST)),
2144 preserve_temp_slots (tgtblk);
2147 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2148 into a new pseudo which is a full word. */
2150 if (GET_MODE (srcreg) != BLKmode
2151 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2152 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2154 /* If the structure doesn't take up a whole number of words, see whether
2155 SRCREG is padded on the left or on the right. If it's on the left,
2156 set PADDING_CORRECTION to the number of bits to skip.
2158 In most ABIs, the structure will be returned at the least significant end of
2159 the register, which translates to right padding on little-endian
2160 targets and left padding on big-endian targets. The opposite
2161 holds if the structure is returned at the most significant
2162 end of the register. */
2163 if (bytes % UNITS_PER_WORD != 0
2164 && (targetm.calls.return_in_msb (type)
2166 : BYTES_BIG_ENDIAN))
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2170 /* Copy the structure BITSIZE bits at a time.
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current time.  */
2175 for (bitpos = 0, xbitpos = padding_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2179 /* We need a new source operand each time xbitpos is on a
2180 word boundary and when xbitpos == padding_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
2183 || xbitpos == padding_correction)
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2187 /* We need a new destination operand each time bitpos is on a word boundary.  */
2189 if (bitpos % BITS_PER_WORD == 0)
2190 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2192 /* Use xbitpos for the source extraction (right justified) and
2193 bitpos for the destination store (left justified).  */
2194 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2195 extract_bit_field (src, bitsize,
2196 xbitpos % BITS_PER_WORD, 1,
2197 NULL_RTX, word_mode, word_mode,
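/* Worked example, not in the original source: on a 32-bit big-endian
   target returning a 6-byte structure at the least significant end of
   the register,

     bytes % UNITS_PER_WORD = 6 % 4 = 2
     padding_correction = 32 - 2 * 8 = 16 bits

   so the loop above extracts source bits starting 16 bits into the
   first word (skipping the left padding) while storing into the
   destination from bit 0.  */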
2205 /* Add a USE expression for REG to the (possibly empty) list pointed
2206 to by CALL_FUSAGE. REG must denote a hard register. */
2209 use_reg (rtx *call_fusage, rtx reg)
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2220 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
2224 use_regs (rtx *call_fusage, int regno, int nregs)
2228 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2231 for (i = 0; i < nregs; i++)
2232 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2235 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2236 PARALLEL REGS. This is for calls that pass values in multiple
2237 non-contiguous locations. The Irix 6 ABI has examples of this. */
2240 use_group_regs (rtx *call_fusage, rtx regs)
2244 for (i = 0; i < XVECLEN (regs, 0); i++)
2246 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2248 /* A NULL entry means the parameter goes both on the stack and in
2249 registers. This can also be a MEM for targets that pass values
2250 partially on the stack and partially in registers. */
2251 if (reg != 0 && GET_CODE (reg) == REG)
2252 use_reg (call_fusage, reg);
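/* Illustrative sketch, not in the original source: recording that a
   call reads an argument from hypothetical hard register 3:

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, gen_rtx_REG (SImode, 3));

   CALL_FUSAGE then holds (expr_list (use (reg:SI 3))), ready to be
   attached to the CALL_INSN's CALL_INSN_FUNCTION_USAGE.  */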
2257 /* Determine whether the LEN bytes generated by CONSTFUN can be
2258 stored to memory using several move instructions. CONSTFUNDATA is
2259 a pointer which will be passed as argument in every CONSTFUN call.
2260 ALIGN is maximum alignment we can assume. Return nonzero if a
2261 call to store_by_pieces should succeed. */
2264 can_store_by_pieces (unsigned HOST_WIDE_INT len,
2265 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2266 void *constfundata, unsigned int align)
2268 unsigned HOST_WIDE_INT max_size, l;
2269 HOST_WIDE_INT offset = 0;
2270 enum machine_mode mode, tmode;
2271 enum insn_code icode;
2278 if (! STORE_BY_PIECES_P (len, align))
2281 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2282 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2283 align = MOVE_MAX * BITS_PER_UNIT;
2285 /* We would first store what we can in the largest integer mode, then go to
2286 successively smaller modes. */
2289 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2294 max_size = STORE_MAX_PIECES + 1;
2295 while (max_size > 1)
2297 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2298 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2299 if (GET_MODE_SIZE (tmode) < max_size)
2302 if (mode == VOIDmode)
2305 icode = mov_optab->handlers[(int) mode].insn_code;
2306 if (icode != CODE_FOR_nothing
2307 && align >= GET_MODE_ALIGNMENT (mode))
2309 unsigned int size = GET_MODE_SIZE (mode);
2316 cst = (*constfun) (constfundata, offset, mode);
2317 if (!LEGITIMATE_CONSTANT_P (cst))
2327 max_size = GET_MODE_SIZE (mode);
2330 /* The code above should have handled everything. */
2338 /* Generate several move instructions to store LEN bytes generated by
2339 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2340 pointer which will be passed as argument in every CONSTFUN call.
2341 ALIGN is maximum alignment we can assume.
2342 If ENDP is 0 return TO, if ENDP is 1 return memory at the end ala
2343 mempcpy, and if ENDP is 2 return memory at the end minus one byte ala stpcpy.  */
2347 store_by_pieces (rtx to, unsigned HOST_WIDE_INT len,
2348 rtx (*constfun) (void *, HOST_WIDE_INT, enum machine_mode),
2349 void *constfundata, unsigned int align, int endp)
2351 struct store_by_pieces data;
2360 if (! STORE_BY_PIECES_P (len, align))
2362 to = protect_from_queue (to, 1);
2363 data.constfun = constfun;
2364 data.constfundata = constfundata;
2367 store_by_pieces_1 (&data, align);
2378 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2379 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2381 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2384 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2391 to1 = adjust_address (data.to, QImode, data.offset);
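/* Illustrative sketch, not in the original source: a minimal CONSTFUN
   returning a zero constant in whatever mode is requested.  The helper
   name is hypothetical.

     static rtx
     zero_piece (void *data ATTRIBUTE_UNUSED,
                 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
                 enum machine_mode mode)
     {
       return CONST0_RTX (mode);
     }

     if (can_store_by_pieces (len, zero_piece, NULL, align))
       store_by_pieces (mem, len, zero_piece, NULL, align, 0);

   This mirrors what clear_by_pieces below does with clear_by_pieces_1.  */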
2399 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2400 rtx with BLKmode). The caller must pass TO through protect_from_queue
2401 before calling. ALIGN is maximum alignment we can assume. */
2404 clear_by_pieces (rtx to, unsigned HOST_WIDE_INT len, unsigned int align)
2406 struct store_by_pieces data;
2411 data.constfun = clear_by_pieces_1;
2412 data.constfundata = NULL;
2415 store_by_pieces_1 (&data, align);
2418 /* Callback routine for clear_by_pieces.
2419 Return const0_rtx unconditionally. */
2422 clear_by_pieces_1 (void *data ATTRIBUTE_UNUSED,
2423 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
2424 enum machine_mode mode ATTRIBUTE_UNUSED)
2429 /* Subroutine of clear_by_pieces and store_by_pieces.
2430 Generate several move instructions to store LEN bytes of block TO. (A MEM
2431 rtx with BLKmode). The caller must pass TO through protect_from_queue
2432 before calling. ALIGN is maximum alignment we can assume. */
2435 store_by_pieces_1 (struct store_by_pieces *data ATTRIBUTE_UNUSED,
2436 unsigned int align ATTRIBUTE_UNUSED)
2438 rtx to_addr = XEXP (data->to, 0);
2439 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2440 enum machine_mode mode = VOIDmode, tmode;
2441 enum insn_code icode;
2444 data->to_addr = to_addr;
2446 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2447 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2449 data->explicit_inc_to = 0;
2451 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2453 data->offset = data->len;
2455 /* If storing requires more than two move insns,
2456 copy addresses to registers (to make displacements shorter)
2457 and use post-increment if available. */
2458 if (!data->autinc_to
2459 && move_by_pieces_ninsns (data->len, align) > 2)
2461 /* Determine the main mode we'll be using. */
2462 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2463 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2464 if (GET_MODE_SIZE (tmode) < max_size)
2467 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2469 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2470 data->autinc_to = 1;
2471 data->explicit_inc_to = -1;
2474 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2475 && ! data->autinc_to)
2477 data->to_addr = copy_addr_to_reg (to_addr);
2478 data->autinc_to = 1;
2479 data->explicit_inc_to = 1;
2482 if ( !data->autinc_to && CONSTANT_P (to_addr))
2483 data->to_addr = copy_addr_to_reg (to_addr);
2486 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2487 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2488 align = MOVE_MAX * BITS_PER_UNIT;
2490 /* First store what we can in the largest integer mode, then go to
2491 successively smaller modes. */
2493 while (max_size > 1)
2495 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2496 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2497 if (GET_MODE_SIZE (tmode) < max_size)
2500 if (mode == VOIDmode)
2503 icode = mov_optab->handlers[(int) mode].insn_code;
2504 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2505 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2507 max_size = GET_MODE_SIZE (mode);
2510 /* The code above should have handled everything. */
2515 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2516 with move instructions for mode MODE. GENFUN is the gen_... function
2517 to make a move insn for that mode. DATA has all the other info. */
2520 store_by_pieces_2 (rtx (*genfun) (rtx, ...), enum machine_mode mode,
2521 struct store_by_pieces *data)
2523 unsigned int size = GET_MODE_SIZE (mode);
2526 while (data->len >= size)
2529 data->offset -= size;
2531 if (data->autinc_to)
2532 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2535 to1 = adjust_address (data->to, mode, data->offset);
2537 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2538 emit_insn (gen_add2_insn (data->to_addr,
2539 GEN_INT (-(HOST_WIDE_INT) size)));
2541 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2542 emit_insn ((*genfun) (to1, cst));
2544 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2545 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2547 if (! data->reverse)
2548 data->offset += size;
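/* Worked example, not in the original source: storing LEN == 7 bytes on
   a 32-bit target, store_by_pieces_1 calls this routine once per mode:
   SImode emits one 4-byte store (len 7 -> 3), HImode one 2-byte store
   (len 3 -> 1), and QImode the final 1-byte store (len 1 -> 0).  */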
2554 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2555 its length in bytes. */
2558 clear_storage (rtx object, rtx size)
2561 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2562 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2564 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2565 just move a zero. Otherwise, do this a piece at a time. */
2566 if (GET_MODE (object) != BLKmode
2567 && GET_CODE (size) == CONST_INT
2568 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2569 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2572 object = protect_from_queue (object, 1);
2573 size = protect_from_queue (size, 0);
2575 if (size == const0_rtx)
2577 else if (GET_CODE (size) == CONST_INT
2578 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2579 clear_by_pieces (object, INTVAL (size), align);
2580 else if (clear_storage_via_clrstr (object, size, align))
2583 retval = clear_storage_via_libcall (object, size);
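/* Illustrative sketch, not in the original source: zeroing a 16-byte
   BLKmode stack temporary.

     rtx buf = assign_stack_temp (BLKmode, 16, 0);
     clear_storage (buf, GEN_INT (16));

   With a constant size accepted by CLEAR_BY_PIECES_P this becomes a
   few stores via clear_by_pieces; otherwise a clrstr pattern and
   finally the memset/bzero libcall are tried, in that order.  */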
2589 /* A subroutine of clear_storage. Expand a clrstr pattern;
2590 return true if successful. */
2593 clear_storage_via_clrstr (rtx object, rtx size, unsigned int align)
2595 /* Try the most limited insn first, because there's no point
2596 including more than one in the machine description unless
2597 the more limited one has some advantage. */
2599 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2600 enum machine_mode mode;
2602 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2603 mode = GET_MODE_WIDER_MODE (mode))
2605 enum insn_code code = clrstr_optab[(int) mode];
2606 insn_operand_predicate_fn pred;
2608 if (code != CODE_FOR_nothing
2609 /* We don't need MODE to be narrower than
2610 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2611 the mode mask, as it is returned by the macro, it will
2612 definitely be less than the actual mode mask. */
2613 && ((GET_CODE (size) == CONST_INT
2614 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2615 <= (GET_MODE_MASK (mode) >> 1)))
2616 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2617 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2618 || (*pred) (object, BLKmode))
2619 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2620 || (*pred) (opalign, VOIDmode)))
2623 rtx last = get_last_insn ();
2626 op1 = convert_to_mode (mode, size, 1);
2627 pred = insn_data[(int) code].operand[1].predicate;
2628 if (pred != 0 && ! (*pred) (op1, mode))
2629 op1 = copy_to_mode_reg (mode, op1);
2631 pat = GEN_FCN ((int) code) (object, op1, opalign);
2638 delete_insns_since (last);
2645 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2646 Return the return value of memset, 0 otherwise. */
2649 clear_storage_via_libcall (rtx object, rtx size)
2651 tree call_expr, arg_list, fn, object_tree, size_tree;
2652 enum machine_mode size_mode;
2655 /* OBJECT or SIZE may have been passed through protect_from_queue.
2657 It is unsafe to save the value generated by protect_from_queue
2658 and reuse it later. Consider what happens if emit_queue is
2659 called before the return value from protect_from_queue is used.
2661 Expansion of the CALL_EXPR below will call emit_queue before
2662 we are finished emitting RTL for argument setup. So if we are
2663 not careful we could get the wrong value for an argument.
2665 To avoid this problem we go ahead and emit code to copy OBJECT
2666 and SIZE into new pseudos. We can then place those new pseudos
2667 into an RTL_EXPR and use them later, even after a call to
2670 Note this is not strictly needed for library calls since they
2671 do not call emit_queue before loading their arguments. However,
2672 we may need to have library calls call emit_queue in the future
2673 since failing to do so could cause problems for targets which
2674 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2676 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2678 if (TARGET_MEM_FUNCTIONS)
2679 size_mode = TYPE_MODE (sizetype);
2681 size_mode = TYPE_MODE (unsigned_type_node);
2682 size = convert_to_mode (size_mode, size, 1);
2683 size = copy_to_mode_reg (size_mode, size);
2685 /* It is incorrect to use the libcall calling conventions to call
2686 memset in this context. This could be a user call to memset and
2687 the user may wish to examine the return value from memset. For
2688 targets where libcalls and normal calls have different conventions
2689 for returning pointers, we could end up generating incorrect code.
2691 For convenience, we generate the call to bzero this way as well. */
2693 object_tree = make_tree (ptr_type_node, object);
2694 if (TARGET_MEM_FUNCTIONS)
2695 size_tree = make_tree (sizetype, size);
2697 size_tree = make_tree (unsigned_type_node, size);
2699 fn = clear_storage_libcall_fn (true);
2700 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2701 if (TARGET_MEM_FUNCTIONS)
2702 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
2703 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
2705 /* Now we have to build up the CALL_EXPR itself. */
2706 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2707 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2708 call_expr, arg_list, NULL_TREE);
2710 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2712 /* If we are initializing a readonly value, show the above call
2713 clobbered it. Otherwise, a load from it may erroneously be
2714 hoisted from a loop. */
2715 if (RTX_UNCHANGING_P (object))
2716 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
2718 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
2721 /* A subroutine of clear_storage_via_libcall. Create the tree node
2722 for the function we use for block clears. The first time FOR_CALL
2723 is true, we call assemble_external. */
2725 static GTY(()) tree block_clear_fn;
2728 init_block_clear_fn (const char *asmspec)
2730 if (!block_clear_fn)
2734 if (TARGET_MEM_FUNCTIONS)
2736 fn = get_identifier ("memset");
2737 args = build_function_type_list (ptr_type_node, ptr_type_node,
2738 integer_type_node, sizetype,
2743 fn = get_identifier ("bzero");
2744 args = build_function_type_list (void_type_node, ptr_type_node,
2745 unsigned_type_node, NULL_TREE);
2748 fn = build_decl (FUNCTION_DECL, fn, args);
2749 DECL_EXTERNAL (fn) = 1;
2750 TREE_PUBLIC (fn) = 1;
2751 DECL_ARTIFICIAL (fn) = 1;
2752 TREE_NOTHROW (fn) = 1;
2754 block_clear_fn = fn;
2759 SET_DECL_RTL (block_clear_fn, NULL_RTX);
2760 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
2765 clear_storage_libcall_fn (int for_call)
2767 static bool emitted_extern;
2769 if (!block_clear_fn)
2770 init_block_clear_fn (NULL);
2772 if (for_call && !emitted_extern)
2774 emitted_extern = true;
2775 make_decl_rtl (block_clear_fn, NULL);
2776 assemble_external (block_clear_fn);
2779 return block_clear_fn;
2782 /* Generate code to copy Y into X.
2783 Both Y and X must have the same mode, except that
2784 Y can be a constant with VOIDmode.
2785 This mode cannot be BLKmode; use emit_block_move for that.
2787 Return the last instruction emitted. */
2790 emit_move_insn (rtx x, rtx y)
2792 enum machine_mode mode = GET_MODE (x);
2793 rtx y_cst = NULL_RTX;
2796 x = protect_from_queue (x, 1);
2797 y = protect_from_queue (y, 0);
2799 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2802 /* Never force constant_p_rtx to memory. */
2803 if (GET_CODE (y) == CONSTANT_P_RTX)
2805 else if (CONSTANT_P (y))
2808 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
2809 && (last_insn = compress_float_constant (x, y)))
2814 if (!LEGITIMATE_CONSTANT_P (y))
2816 y = force_const_mem (mode, y);
2818 /* If the target's cannot_force_const_mem prevented the spill,
2819 assume that the target's move expanders will also take care
2820 of the non-legitimate constant. */
2826 /* If X or Y are memory references, verify that their addresses are valid for the machine.  */
2828 if (GET_CODE (x) == MEM
2829 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2830 && ! push_operand (x, GET_MODE (x)))
2832 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2833 x = validize_mem (x);
2835 if (GET_CODE (y) == MEM
2836 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2838 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2839 y = validize_mem (y);
2841 if (mode == BLKmode)
2844 last_insn = emit_move_insn_1 (x, y);
2846 if (y_cst && GET_CODE (x) == REG
2847 && (set = single_set (last_insn)) != NULL_RTX
2848 && SET_DEST (set) == x
2849 && ! rtx_equal_p (y_cst, SET_SRC (set)))
2850 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
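/* Illustrative sketch, not in the original source: loading a constant
   into a fresh pseudo.

     rtx reg = gen_reg_rtx (DFmode);
     emit_move_insn (reg, CONST1_RTX (DFmode));

   If the constant has to be spilled to the constant pool, the REG_EQUAL
   note added above still records the original value for later passes.  */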
2855 /* Low level part of emit_move_insn.
2856 Called just like emit_move_insn, but assumes X and Y
2857 are basically valid. */
2860 emit_move_insn_1 (rtx x, rtx y)
2862 enum machine_mode mode = GET_MODE (x);
2863 enum machine_mode submode;
2864 enum mode_class class = GET_MODE_CLASS (mode);
2866 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
2869 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2871 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2873 /* Expand complex moves by moving real part and imag part, if possible. */
2874 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2875 && BLKmode != (submode = GET_MODE_INNER (mode))
2876 && (mov_optab->handlers[(int) submode].insn_code
2877 != CODE_FOR_nothing))
2879 /* Don't split destination if it is a stack push. */
2880 int stack = push_operand (x, GET_MODE (x));
2882 #ifdef PUSH_ROUNDING
2883 /* In case we output to the stack, but the size is smaller than the
2884 machine can push exactly, we need to use move instructions. */
2886 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
2887 != GET_MODE_SIZE (submode)))
2890 HOST_WIDE_INT offset1, offset2;
2892 /* Do not use anti_adjust_stack, since we don't want to update
2893 stack_pointer_delta. */
2894 temp = expand_binop (Pmode,
2895 #ifdef STACK_GROWS_DOWNWARD
2903 (GET_MODE_SIZE (GET_MODE (x)))),
2904 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
2906 if (temp != stack_pointer_rtx)
2907 emit_move_insn (stack_pointer_rtx, temp);
2909 #ifdef STACK_GROWS_DOWNWARD
2911 offset2 = GET_MODE_SIZE (submode);
2913 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
2914 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
2915 + GET_MODE_SIZE (submode));
2918 emit_move_insn (change_address (x, submode,
2919 gen_rtx_PLUS (Pmode,
2921 GEN_INT (offset1))),
2922 gen_realpart (submode, y));
2923 emit_move_insn (change_address (x, submode,
2924 gen_rtx_PLUS (Pmode,
2926 GEN_INT (offset2))),
2927 gen_imagpart (submode, y));
2931 /* If this is a stack, push the highpart first, so it
2932 will be in the argument order.
2934 In that case, change_address is used only to convert
2935 the mode, not to change the address. */
2938 /* Note that the real part always precedes the imag part in memory
2939 regardless of machine's endianness. */
2940 #ifdef STACK_GROWS_DOWNWARD
2941 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2942 gen_imagpart (submode, y));
2943 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2944 gen_realpart (submode, y));
2946 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2947 gen_realpart (submode, y));
2948 emit_move_insn (gen_rtx_MEM (submode, XEXP (x, 0)),
2949 gen_imagpart (submode, y));
2954 rtx realpart_x, realpart_y;
2955 rtx imagpart_x, imagpart_y;
2957 /* If this is a complex value with each part being smaller than a
2958 word, the usual calling sequence will likely pack the pieces into
2959 a single register. Unfortunately, SUBREG of hard registers only
2960 deals in terms of words, so we have a problem converting input
2961 arguments to the CONCAT of two registers that is used elsewhere
2962 for complex values. If this is before reload, we can copy it into
2963 memory and reload. FIXME, we should see about using extract and
2964 insert on integer registers, but complex short and complex char
2965 variables should be rarely used. */
2966 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
2967 && (reload_in_progress | reload_completed) == 0)
2970 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2972 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2974 if (packed_dest_p || packed_src_p)
2976 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2977 ? MODE_FLOAT : MODE_INT);
2979 enum machine_mode reg_mode
2980 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2982 if (reg_mode != BLKmode)
2984 rtx mem = assign_stack_temp (reg_mode,
2985 GET_MODE_SIZE (mode), 0);
2986 rtx cmem = adjust_address (mem, mode, 0);
2989 = N_("function using short complex types cannot be inline");
2993 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2995 emit_move_insn_1 (cmem, y);
2996 return emit_move_insn_1 (sreg, mem);
3000 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3002 emit_move_insn_1 (mem, sreg);
3003 return emit_move_insn_1 (x, cmem);
3009 realpart_x = gen_realpart (submode, x);
3010 realpart_y = gen_realpart (submode, y);
3011 imagpart_x = gen_imagpart (submode, x);
3012 imagpart_y = gen_imagpart (submode, y);
3014 /* Show the output dies here. This is necessary for SUBREGs
3015 of pseudos since we cannot track their lifetimes correctly;
3016 hard regs shouldn't appear here except as return values.
3017 We never want to emit such a clobber after reload. */
3019 && ! (reload_in_progress || reload_completed)
3020 && (GET_CODE (realpart_x) == SUBREG
3021 || GET_CODE (imagpart_x) == SUBREG))
3022 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3024 emit_move_insn (realpart_x, realpart_y);
3025 emit_move_insn (imagpart_x, imagpart_y);
3028 return get_last_insn ();
3031 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3032 find a mode to do it in. If we have a movcc, use it. Otherwise,
3033 find the MODE_INT mode of the same width. */
3034 else if (GET_MODE_CLASS (mode) == MODE_CC
3035 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3037 enum insn_code insn_code;
3038 enum machine_mode tmode = VOIDmode;
3042 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3045 for (tmode = QImode; tmode != VOIDmode;
3046 tmode = GET_MODE_WIDER_MODE (tmode))
3047 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3050 if (tmode == VOIDmode)
3053 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3054 may call change_address which is not appropriate if we were
3055 called when a reload was in progress. We don't have to worry
3056 about changing the address since the size in bytes is supposed to
3057 be the same. Copy the MEM to change the mode and move any
3058 substitutions from the old MEM to the new one. */
3060 if (reload_in_progress)
3062 x = gen_lowpart_common (tmode, x1);
3063 if (x == 0 && GET_CODE (x1) == MEM)
3065 x = adjust_address_nv (x1, tmode, 0);
3066 copy_replacements (x1, x);
3069 y = gen_lowpart_common (tmode, y1);
3070 if (y == 0 && GET_CODE (y1) == MEM)
3072 y = adjust_address_nv (y1, tmode, 0);
3073 copy_replacements (y1, y);
3078 x = gen_lowpart (tmode, x);
3079 y = gen_lowpart (tmode, y);
3082 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3083 return emit_insn (GEN_FCN (insn_code) (x, y));
3086 /* Try using a move pattern for the corresponding integer mode. This is
3087 only safe when simplify_subreg can convert MODE constants into integer
3088 constants. At present, it can only do this reliably if the value
3089 fits within a HOST_WIDE_INT. */
3090 else if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3091 && (submode = int_mode_for_mode (mode)) != BLKmode
3092 && mov_optab->handlers[submode].insn_code != CODE_FOR_nothing)
3093 return emit_insn (GEN_FCN (mov_optab->handlers[submode].insn_code)
3094 (simplify_gen_subreg (submode, x, mode, 0),
3095 simplify_gen_subreg (submode, y, mode, 0)));
3097 /* This will handle any multi-word or full-word mode that lacks a move_insn
3098 pattern. However, you will get better code if you define such patterns,
3099 even if they must turn into multiple assembler instructions. */
3100 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3107 #ifdef PUSH_ROUNDING
3109 /* If X is a push on the stack, do the push now and replace
3110 X with a reference to the stack pointer. */
3111 if (push_operand (x, GET_MODE (x)))
3116 /* Do not use anti_adjust_stack, since we don't want to update
3117 stack_pointer_delta. */
3118 temp = expand_binop (Pmode,
3119 #ifdef STACK_GROWS_DOWNWARD
3127 (GET_MODE_SIZE (GET_MODE (x)))),
3128 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3130 if (temp != stack_pointer_rtx)
3131 emit_move_insn (stack_pointer_rtx, temp);
3133 code = GET_CODE (XEXP (x, 0));
3135 /* Just hope that small offsets off SP are OK. */
3136 if (code == POST_INC)
3137 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3138 GEN_INT (-((HOST_WIDE_INT)
3139 GET_MODE_SIZE (GET_MODE (x)))));
3140 else if (code == POST_DEC)
3141 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3142 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3144 temp = stack_pointer_rtx;
3146 x = change_address (x, VOIDmode, temp);
3150 /* If we are in reload, see if either operand is a MEM whose address
3151 is scheduled for replacement. */
3152 if (reload_in_progress && GET_CODE (x) == MEM
3153 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3154 x = replace_equiv_address_nv (x, inner);
3155 if (reload_in_progress && GET_CODE (y) == MEM
3156 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3157 y = replace_equiv_address_nv (y, inner);
3163 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3166 rtx xpart = operand_subword (x, i, 1, mode);
3167 rtx ypart = operand_subword (y, i, 1, mode);
3169 /* If we can't get a part of Y, put Y into memory if it is a
3170 constant. Otherwise, force it into a register. If we still
3171 can't get a part of Y, abort. */
3172 if (ypart == 0 && CONSTANT_P (y))
3174 y = force_const_mem (mode, y);
3175 ypart = operand_subword (y, i, 1, mode);
3177 else if (ypart == 0)
3178 ypart = operand_subword_force (y, i, mode);
3180 if (xpart == 0 || ypart == 0)
3183 need_clobber |= (GET_CODE (xpart) == SUBREG);
3185 last_insn = emit_move_insn (xpart, ypart);
3191 /* Show the output dies here. This is necessary for SUBREGs
3192 of pseudos since we cannot track their lifetimes correctly;
3193 hard regs shouldn't appear here except as return values.
3194 We never want to emit such a clobber after reload. */
3196 && ! (reload_in_progress || reload_completed)
3197 && need_clobber != 0)
3198 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3208 /* If Y is representable exactly in a narrower mode, and the target can
3209 perform the extension directly from constant or memory, then emit the
3210 move as an extension. */
3213 compress_float_constant (rtx x, rtx y)
3215 enum machine_mode dstmode = GET_MODE (x);
3216 enum machine_mode orig_srcmode = GET_MODE (y);
3217 enum machine_mode srcmode;
3220 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3222 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3223 srcmode != orig_srcmode;
3224 srcmode = GET_MODE_WIDER_MODE (srcmode))
3227 rtx trunc_y, last_insn;
3229 /* Skip if the target can't extend this way. */
3230 ic = can_extend_p (dstmode, srcmode, 0);
3231 if (ic == CODE_FOR_nothing)
3234 /* Skip if the narrowed value isn't exact. */
3235 if (! exact_real_truncate (srcmode, &r))
3238 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3240 if (LEGITIMATE_CONSTANT_P (trunc_y))
3242 /* Skip if the target needs extra instructions to perform the extension.  */
3244 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3247 else if (float_extend_from_mem[dstmode][srcmode])
3248 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3252 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3253 last_insn = get_last_insn ();
3255 if (GET_CODE (x) == REG)
3256 set_unique_reg_note (last_insn, REG_EQUAL, y);
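/* Worked example, not in the original source: if X is a DFmode pseudo
   and Y is the CONST_DOUBLE for 1.5, the value truncates exactly to
   SFmode, so on a target providing extendsfdf2 the move is emitted as
   a float extension from an SFmode constant (or constant-pool MEM),
   halving the size of the pool entry.  */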
3264 /* Pushing data onto the stack. */
3266 /* Push a block of length SIZE (perhaps variable)
3267 and return an rtx to address the beginning of the block.
3268 Note that it is not possible for the value returned to be a QUEUED.
3269 The value may be virtual_outgoing_args_rtx.
3271 EXTRA is the number of bytes of padding to push in addition to SIZE.
3272 BELOW nonzero means this padding comes at low addresses;
3273 otherwise, the padding comes at high addresses. */
3276 push_block (rtx size, int extra, int below)
3280 size = convert_modes (Pmode, ptr_mode, size, 1);
3281 if (CONSTANT_P (size))
3282 anti_adjust_stack (plus_constant (size, extra));
3283 else if (GET_CODE (size) == REG && extra == 0)
3284 anti_adjust_stack (size);
3287 temp = copy_to_mode_reg (Pmode, size);
3289 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3290 temp, 0, OPTAB_LIB_WIDEN);
3291 anti_adjust_stack (temp);
3294 #ifndef STACK_GROWS_DOWNWARD
3300 temp = virtual_outgoing_args_rtx;
3301 if (extra != 0 && below)
3302 temp = plus_constant (temp, extra);
3306 if (GET_CODE (size) == CONST_INT)
3307 temp = plus_constant (virtual_outgoing_args_rtx,
3308 -INTVAL (size) - (below ? 0 : extra));
3309 else if (extra != 0 && !below)
3310 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3311 negate_rtx (Pmode, plus_constant (size, extra)));
3313 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3314 negate_rtx (Pmode, size));
3317 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
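/* Illustrative sketch, not in the original source: reserving 32 bytes
   of stack for a BLKmode argument and obtaining its address:

     rtx addr = push_block (GEN_INT (32), 0, 0);

   EXTRA is 0 here, so no padding is pushed below or above the block.  */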
3320 #ifdef PUSH_ROUNDING
3322 /* Emit single push insn. */
3325 emit_single_push_insn (enum machine_mode mode, rtx x, tree type)
3328 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3330 enum insn_code icode;
3331 insn_operand_predicate_fn pred;
3333 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3334 /* If there is a push pattern, use it.  Otherwise try the old way of throwing
3335 a MEM representing the push operation to the move expander.  */
3336 icode = push_optab->handlers[(int) mode].insn_code;
3337 if (icode != CODE_FOR_nothing)
3339 if (((pred = insn_data[(int) icode].operand[0].predicate)
3340 && !((*pred) (x, mode))))
3341 x = force_reg (mode, x);
3342 emit_insn (GEN_FCN (icode) (x));
3345 if (GET_MODE_SIZE (mode) == rounded_size)
3346 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3347 /* If we are to pad downward, adjust the stack pointer first and
3348 then store X into the stack location using an offset. This is
3349 because emit_move_insn does not know how to pad; it does not have access to type.  */
3351 else if (FUNCTION_ARG_PADDING (mode, type) == downward)
3353 unsigned padding_size = rounded_size - GET_MODE_SIZE (mode);
3354 HOST_WIDE_INT offset;
3356 emit_move_insn (stack_pointer_rtx,
3357 expand_binop (Pmode,
3358 #ifdef STACK_GROWS_DOWNWARD
3364 GEN_INT (rounded_size),
3365 NULL_RTX, 0, OPTAB_LIB_WIDEN));
3367 offset = (HOST_WIDE_INT) padding_size;
3368 #ifdef STACK_GROWS_DOWNWARD
3369 if (STACK_PUSH_CODE == POST_DEC)
3370 /* We have already decremented the stack pointer, so get the previous value.  */
3372 offset += (HOST_WIDE_INT) rounded_size;
3374 if (STACK_PUSH_CODE == POST_INC)
3375 /* We have already incremented the stack pointer, so get the previous value.  */
3377 offset -= (HOST_WIDE_INT) rounded_size;
3379 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (offset));
3383 #ifdef STACK_GROWS_DOWNWARD
3384 /* ??? This seems wrong if STACK_PUSH_CODE == POST_DEC. */
3385 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3386 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3388 /* ??? This seems wrong if STACK_PUSH_CODE == POST_INC. */
3389 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3390 GEN_INT (rounded_size));
3392 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3395 dest = gen_rtx_MEM (mode, dest_addr);
3399 set_mem_attributes (dest, type, 1);
3401 if (flag_optimize_sibling_calls)
3402 /* Function incoming arguments may overlap with sibling call
3403 outgoing arguments and we cannot allow reordering of reads
3404 from function arguments with stores to outgoing arguments
3405 of sibling calls. */
3406 set_mem_alias_set (dest, 0);
3408 emit_move_insn (dest, x);
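/* Worked example, not in the original source: pushing an HImode value
   where PUSH_ROUNDING rounds 2 bytes up to 4 and the argument pads
   downward on a downward-growing stack: rounded_size is 4 and
   padding_size is 2, so the stack pointer drops by 4 and the value is
   stored at offset 2 (plus rounded_size when STACK_PUSH_CODE is
   POST_DEC, since the pointer was already decremented).  */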
3412 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3414 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3416 SIZE is an rtx for the size of data to be copied (in bytes),
3417 needed only if X is BLKmode.
3419 ALIGN (in bits) is maximum alignment we can assume.
3421 If PARTIAL and REG are both nonzero, then copy that many of the first
3422 words of X into registers starting with REG, and push the rest of X.
3423 The amount of space pushed is decreased by PARTIAL words,
3424 rounded *down* to a multiple of PARM_BOUNDARY.
3425 REG must be a hard register in this case.
3426 If REG is zero but PARTIAL is not, take all other actions for an
3427 argument partially in registers, but do not actually load any registers.
3430 EXTRA is the amount in bytes of extra space to leave next to this arg.
3431 This is ignored if an argument block has already been allocated.
3433 On a machine that lacks real push insns, ARGS_ADDR is the address of
3434 the bottom of the argument block for this call. We use indexing off there
3435 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3436 argument block has not been preallocated.
3438 ARGS_SO_FAR is the size of args previously pushed for this call.
3440 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3441 for arguments passed in registers. If nonzero, it will be the number
3442 of bytes required. */
3445 emit_push_insn (rtx x, enum machine_mode mode, tree type, rtx size,
3446 unsigned int align, int partial, rtx reg, int extra,
3447 rtx args_addr, rtx args_so_far, int reg_parm_stack_space,
3451 enum direction stack_direction
3452 #ifdef STACK_GROWS_DOWNWARD
3458 /* Decide where to pad the argument: `downward' for below,
3459 `upward' for above, or `none' for don't pad it.
3460 Default is below for small data on big-endian machines; else above. */
3461 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3463 /* Invert direction if stack is post-decrement.  FIXME: why?  */
3465 if (STACK_PUSH_CODE == POST_DEC)
3466 if (where_pad != none)
3467 where_pad = (where_pad == downward ? upward : downward);
3469 xinner = x = protect_from_queue (x, 0);
3471 if (mode == BLKmode)
3473 /* Copy a block into the stack, entirely or partially. */
3476 int used = partial * UNITS_PER_WORD;
3480 if (reg && GET_CODE (reg) == PARALLEL)
3482 /* Use the size of the elt to compute offset. */
3483 rtx elt = XEXP (XVECEXP (reg, 0, 0), 0);
3484 used = partial * GET_MODE_SIZE (GET_MODE (elt));
3485 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3488 offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3495 /* USED is now the # of bytes we need not copy to the stack
3496 because registers will take care of them. */
3499 xinner = adjust_address (xinner, BLKmode, used);
3501 /* If the partial register-part of the arg counts in its stack size,
3502 skip the part of stack space corresponding to the registers.
3503 Otherwise, start copying to the beginning of the stack space,
3504 by setting SKIP to 0. */
3505 skip = (reg_parm_stack_space == 0) ? 0 : used;
3507 #ifdef PUSH_ROUNDING
3508 /* Do it with several push insns if that doesn't take lots of insns
3509 and if there is no difficulty with push insns that skip bytes
3510 on the stack for alignment purposes. */
3513 && GET_CODE (size) == CONST_INT
3515 && MEM_ALIGN (xinner) >= align
3516 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3517 /* Here we avoid the case of a structure whose weak alignment
3518 forces many pushes of a small amount of data,
3519 and such small pushes do rounding that causes trouble. */
3520 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3521 || align >= BIGGEST_ALIGNMENT
3522 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3523 == (align / BITS_PER_UNIT)))
3524 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3526 /* Push padding now if padding above and stack grows down,
3527 or if padding below and stack grows up.
3528 But if space already allocated, this has already been done. */
3529 if (extra && args_addr == 0
3530 && where_pad != none && where_pad != stack_direction)
3531 anti_adjust_stack (GEN_INT (extra));
3533 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3536 #endif /* PUSH_ROUNDING */
3540 /* Otherwise make space on the stack and copy the data
3541 to the address of that space. */
3543 /* Deduct words put into registers from the size we must copy. */
3546 if (GET_CODE (size) == CONST_INT)
3547 size = GEN_INT (INTVAL (size) - used);
3549 size = expand_binop (GET_MODE (size), sub_optab, size,
3550 GEN_INT (used), NULL_RTX, 0,
3554 /* Get the address of the stack space.
3555 In this case, we do not deal with EXTRA separately.
3556 A single stack adjust will do. */
3559 temp = push_block (size, extra, where_pad == downward);
3562 else if (GET_CODE (args_so_far) == CONST_INT)
3563 temp = memory_address (BLKmode,
3564 plus_constant (args_addr,
3565 skip + INTVAL (args_so_far)));
3567 temp = memory_address (BLKmode,
3568 plus_constant (gen_rtx_PLUS (Pmode,
3573 if (!ACCUMULATE_OUTGOING_ARGS)
3575 /* If the source is referenced relative to the stack pointer,
3576 copy it to another register to stabilize it. We do not need
3577 to do this if we know that we won't be changing sp. */
3579 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3580 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3581 temp = copy_to_reg (temp);
3584 target = gen_rtx_MEM (BLKmode, temp);
3588 set_mem_attributes (target, type, 1);
3589 /* Function incoming arguments may overlap with sibling call
3590 outgoing arguments and we cannot allow reordering of reads
3591 from function arguments with stores to outgoing arguments
3592 of sibling calls. */
3593 set_mem_alias_set (target, 0);
3596 /* ALIGN may well be better aligned than TYPE, e.g. due to
3597 PARM_BOUNDARY. Assume the caller isn't lying. */
3598 set_mem_align (target, align);
3600 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3603 else if (partial > 0)
3605 /* Scalar partly in registers. */
3607 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3610 /* # words of start of argument
3611 that we must make space for but need not store. */
3612 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3613 int args_offset = INTVAL (args_so_far);
3616 /* Push padding now if padding above and stack grows down,
3617 or if padding below and stack grows up.
3618 But if space already allocated, this has already been done. */
3619 if (extra && args_addr == 0
3620 && where_pad != none && where_pad != stack_direction)
3621 anti_adjust_stack (GEN_INT (extra));
3623 /* If we make space by pushing it, we might as well push
3624 the real data. Otherwise, we can leave OFFSET nonzero
3625 and leave the space uninitialized. */
3629 /* Now NOT_STACK gets the number of words that we don't need to
3630 allocate on the stack. */
3631 not_stack = partial - offset;
3633 /* If the partial register-part of the arg counts in its stack size,
3634 skip the part of stack space corresponding to the registers.
3635 Otherwise, start copying to the beginning of the stack space,
3636 by setting SKIP to 0. */
3637 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3639 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3640 x = validize_mem (force_const_mem (mode, x));
3642 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3643 SUBREGs of such registers are not allowed. */
3644 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3645 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3646 x = copy_to_reg (x);
3648 /* Loop over all the words allocated on the stack for this arg. */
3649 /* We can do it by words, because any scalar bigger than a word
3650 has a size a multiple of a word. */
3651 #ifndef PUSH_ARGS_REVERSED
3652 for (i = not_stack; i < size; i++)
3654 for (i = size - 1; i >= not_stack; i--)
3656 if (i >= not_stack + offset)
3657 emit_push_insn (operand_subword_force (x, i, mode),
3658 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3660 GEN_INT (args_offset + ((i - not_stack + skip)
3662 reg_parm_stack_space, alignment_pad);
3669 /* Push padding now if padding above and stack grows down,
3670 or if padding below and stack grows up.
3671 But if space already allocated, this has already been done. */
3672 if (extra && args_addr == 0
3673 && where_pad != none && where_pad != stack_direction)
3674 anti_adjust_stack (GEN_INT (extra));
3676 #ifdef PUSH_ROUNDING
3677 if (args_addr == 0 && PUSH_ARGS)
3678 emit_single_push_insn (mode, x, type);
3682 if (GET_CODE (args_so_far) == CONST_INT)
3684 = memory_address (mode,
3685 plus_constant (args_addr,
3686 INTVAL (args_so_far)));
3688 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3690 dest = gen_rtx_MEM (mode, addr);
3693 set_mem_attributes (dest, type, 1);
3694 /* Function incoming arguments may overlap with sibling call
3695 outgoing arguments and we cannot allow reordering of reads
3696 from function arguments with stores to outgoing arguments
3697 of sibling calls. */
3698 set_mem_alias_set (dest, 0);
3701 emit_move_insn (dest, x);
3705 /* If part should go in registers, copy that part
3706 into the appropriate registers. Do this now, at the end,
3707 since mem-to-mem copies above may do function calls. */
3708 if (partial > 0 && reg != 0)
3710 /* Handle calls that pass values in multiple non-contiguous locations.
3711 The Irix 6 ABI has examples of this. */
3712 if (GET_CODE (reg) == PARALLEL)
3713 emit_group_load (reg, x, type, -1);
3715 move_block_to_reg (REGNO (reg), x, partial, mode);
3718 if (extra && args_addr == 0 && where_pad == stack_direction)
3719 anti_adjust_stack (GEN_INT (extra));
3721 if (alignment_pad && args_addr == 0)
3722 anti_adjust_stack (alignment_pad);
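/* Worked example, not in the original source: a scalar partly in
   registers with PARTIAL == 1, BITS_PER_WORD == 32 and PARM_BOUNDARY
   == 64 gives offset = 1 % 2 = 1, so with a preallocated argument
   block the first word's stack slot is allocated but left
   uninitialized, because that word travels in REG (copied at the end
   of the routine).  */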
3725 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations.  */
3729 get_subtarget (rtx x)
3732 /* Only registers can be subtargets. */
3733 || GET_CODE (x) != REG
3734 /* If the register is readonly, it can't be set more than once. */
3735 || RTX_UNCHANGING_P (x)
3736 /* Don't use hard regs to avoid extending their life. */
3737 || REGNO (x) < FIRST_PSEUDO_REGISTER
3738 /* Avoid subtargets inside loops,
3739 since they hide some invariant expressions. */
3740 || preserve_subexpressions_p ())
3744 /* Expand an assignment that stores the value of FROM into TO.
3745 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3746 (This may contain a QUEUED rtx;
3747 if the value is constant, this rtx is a constant.)
3748 Otherwise, the returned value is NULL_RTX. */
3751 expand_assignment (tree to, tree from, int want_value)
3756 /* Don't crash if the lhs of the assignment was erroneous. */
3758 if (TREE_CODE (to) == ERROR_MARK)
3760 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3761 return want_value ? result : NULL_RTX;
3764 /* Assignment of a structure component needs special treatment
3765 if the structure component's rtx is not simply a MEM.
3766 Assignment of an array element at a constant index, and assignment of
3767 an array element in an unaligned packed structure field, has the same problem.  */
3770 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3771 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
3772 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
3774 enum machine_mode mode1;
3775 HOST_WIDE_INT bitsize, bitpos;
3783 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3784 &unsignedp, &volatilep);
3786 /* If we are going to use store_bit_field and extract_bit_field,
3787 make sure to_rtx will be safe for multiple use. */
3789 if (mode1 == VOIDmode && want_value)
3790 tem = stabilize_reference (tem);
3792 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
3796 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
3798 if (GET_CODE (to_rtx) != MEM)
3801 #ifdef POINTERS_EXTEND_UNSIGNED
3802 if (GET_MODE (offset_rtx) != Pmode)
3803 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
3805 if (GET_MODE (offset_rtx) != ptr_mode)
3806 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3809 /* A constant address in TO_RTX can have VOIDmode; we must not try
3810 to call force_reg for that case. Avoid that case. */
3811 if (GET_CODE (to_rtx) == MEM
3812 && GET_MODE (to_rtx) == BLKmode
3813 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3815 && (bitpos % bitsize) == 0
3816 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3817 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
3819 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
3823 to_rtx = offset_address (to_rtx, offset_rtx,
3824 highest_pow2_factor_for_type (TREE_TYPE (to),
3828 if (GET_CODE (to_rtx) == MEM)
3830 /* If the field is at offset zero, we could have been given the
3831 DECL_RTX of the parent struct. Don't munge it. */
3832 to_rtx = shallow_copy_rtx (to_rtx);
3834 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
3837 /* Deal with volatile and readonly fields. The former is only done
3838 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
3839 if (volatilep && GET_CODE (to_rtx) == MEM)
3841 if (to_rtx == orig_to_rtx)
3842 to_rtx = copy_rtx (to_rtx);
3843 MEM_VOLATILE_P (to_rtx) = 1;
3846 if (TREE_CODE (to) == COMPONENT_REF
3847 && TREE_READONLY (TREE_OPERAND (to, 1))
3848 /* We can't assert that a MEM won't be set more than once
3849 if the component is not addressable because another
3850 non-addressable component may be referenced by the same MEM. */
3851 && ! (GET_CODE (to_rtx) == MEM && ! can_address_p (to)))
3853 if (to_rtx == orig_to_rtx)
3854 to_rtx = copy_rtx (to_rtx);
3855 RTX_UNCHANGING_P (to_rtx) = 1;
3858 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
3860 if (to_rtx == orig_to_rtx)
3861 to_rtx = copy_rtx (to_rtx);
3862 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
3865 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3867 /* Spurious cast for HPUX compiler. */
3868 ? ((enum machine_mode)
3869 TYPE_MODE (TREE_TYPE (to)))
3871 unsignedp, TREE_TYPE (tem), get_alias_set (to));
3873 preserve_temp_slots (result);
3877 /* If the value is meaningful, convert RESULT to the proper mode.
3878 Otherwise, return nothing. */
3879 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3880 TYPE_MODE (TREE_TYPE (from)),
3882 TREE_UNSIGNED (TREE_TYPE (to)))
3886 /* If the rhs is a function call and its value is not an aggregate,
3887 call the function before we start to compute the lhs.
3888 This is needed for correct code for cases such as
3889 val = setjmp (buf) on machines where reference to val
3890 requires loading up part of an address in a separate insn.
3892 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3893 since it might be a promoted variable where the zero- or sign- extension
3894 needs to be done. Handling this in the normal way is safe because no
3895 computation is done before the call. */
3896 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from, from)
3897 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3898 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3899 && GET_CODE (DECL_RTL (to)) == REG))
3904 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3906 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3908 /* Handle calls that return values in multiple non-contiguous locations.
3909 The Irix 6 ABI has examples of this. */
3910 if (GET_CODE (to_rtx) == PARALLEL)
3911 emit_group_load (to_rtx, value, TREE_TYPE (from),
3912 int_size_in_bytes (TREE_TYPE (from)));
3913 else if (GET_MODE (to_rtx) == BLKmode)
3914 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
3917 if (POINTER_TYPE_P (TREE_TYPE (to)))
3918 value = convert_memory_address (GET_MODE (to_rtx), value);
3919 emit_move_insn (to_rtx, value);
3921 preserve_temp_slots (to_rtx);
3924 return want_value ? to_rtx : NULL_RTX;
3927 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3928 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3931 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
3933 /* Don't move directly into a return register. */
3934 if (TREE_CODE (to) == RESULT_DECL
3935 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
3940 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3942 if (GET_CODE (to_rtx) == PARALLEL)
3943 emit_group_load (to_rtx, temp, TREE_TYPE (from),
3944 int_size_in_bytes (TREE_TYPE (from)));
3946 emit_move_insn (to_rtx, temp);
3948 preserve_temp_slots (to_rtx);
3951 return want_value ? to_rtx : NULL_RTX;
3954 /* In case we are returning the contents of an object which overlaps
3955 the place the value is being stored, use a safe function when copying
3956 a value through a pointer into a structure value return block. */
3957 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3958 && current_function_returns_struct
3959 && !current_function_returns_pcc_struct)
3964 size = expr_size (from);
3965 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
3967 if (TARGET_MEM_FUNCTIONS)
3968 emit_library_call (memmove_libfunc, LCT_NORMAL,
3969 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3970 XEXP (from_rtx, 0), Pmode,
3971 convert_to_mode (TYPE_MODE (sizetype),
3972 size, TREE_UNSIGNED (sizetype)),
3973 TYPE_MODE (sizetype));
3975 emit_library_call (bcopy_libfunc, LCT_NORMAL,
3976 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3977 XEXP (to_rtx, 0), Pmode,
3978 convert_to_mode (TYPE_MODE (integer_type_node),
3980 TREE_UNSIGNED (integer_type_node)),
3981 TYPE_MODE (integer_type_node));
3983 preserve_temp_slots (to_rtx);
3986 return want_value ? to_rtx : NULL_RTX;
3989 /* Compute FROM and store the value in the rtx we got. */
3992 result = store_expr (from, to_rtx, want_value);
3993 preserve_temp_slots (result);
3996 return want_value ? result : NULL_RTX;
3999 /* Generate code for computing expression EXP,
4000 and storing the value into TARGET.
4001 TARGET may contain a QUEUED rtx.
4003 If WANT_VALUE & 1 is nonzero, return a copy of the value
4004 not in TARGET, so that we can be sure to use the proper
4005 value in a containing expression even if TARGET has something
4006 else stored in it. If possible, we copy the value through a pseudo
4007 and return that pseudo. Or, if the value is constant, we try to
4008 return the constant. In some cases, we return a pseudo
4009 copied *from* TARGET.
4011 If the mode is BLKmode then we may return TARGET itself.
4012 It turns out that in BLKmode it doesn't cause a problem,
4013 because C has no operators that could combine two different
4014 assignments into the same BLKmode object with different values
4015 with no sequence point. Will other languages need this to be fixed?
4018 If WANT_VALUE & 1 is 0, we return NULL, to make sure
4019 to catch quickly any cases where the caller uses the value
4020 and fails to set WANT_VALUE.
4022 If WANT_VALUE & 2 is set, this is a store into a call param on the
4023 stack, and block moves may need to be treated specially. */
4026 store_expr (tree exp, rtx target, int want_value)
4029 rtx alt_rtl = NULL_RTX;
4030 int dont_return_target = 0;
4031 int dont_store_target = 0;
4033 if (VOID_TYPE_P (TREE_TYPE (exp)))
4035 /* C++ can generate ?: expressions with a throw expression in one
4036 branch and an rvalue in the other. Here, we resolve attempts to
4037 store the throw expression's nonexistent result. */
4040 expand_expr (exp, const0_rtx, VOIDmode, 0);
4043 if (TREE_CODE (exp) == COMPOUND_EXPR)
4045 /* Perform first part of compound expression, then assign from second
4047 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4048 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4050 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4052 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4054 /* For conditional expression, get safe form of the target. Then
4055 test the condition, doing the appropriate assignment on either
4056 side. This avoids the creation of unnecessary temporaries.
4057 For non-BLKmode, it is more efficient not to do this. */
4059 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4062 target = protect_from_queue (target, 1);
4064 do_pending_stack_adjust ();
4066 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4067 start_cleanup_deferral ();
4068 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
4069 end_cleanup_deferral ();
4071 emit_jump_insn (gen_jump (lab2));
4074 start_cleanup_deferral ();
4075 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
4076 end_cleanup_deferral ();
4081 return want_value & 1 ? target : NULL_RTX;
4083 else if (queued_subexp_p (target))
4084 /* If target contains a postincrement, let's not risk
4085 using it as the place to generate the rhs. */
4087 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4089 /* Expand EXP into a new pseudo. */
4090 temp = gen_reg_rtx (GET_MODE (target));
4091 temp = expand_expr (exp, temp, GET_MODE (target),
4093 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4096 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4098 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4100 /* If target is volatile, ANSI requires accessing the value
4101 *from* the target, if it is accessed. So make that happen.
4102 In no case return the target itself. */
4103 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
4104 dont_return_target = 1;
4106 else if ((want_value & 1) != 0
4107 && GET_CODE (target) == MEM
4108 && ! MEM_VOLATILE_P (target)
4109 && GET_MODE (target) != BLKmode)
4110 /* If target is in memory and caller wants value in a register instead,
4111 arrange that. Pass TARGET as target for expand_expr so that,
4112 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4113 We know expand_expr will not use the target in that case.
4114 Don't do this if TARGET is volatile because we are supposed
4115 to write it and then read it. */
4117 temp = expand_expr (exp, target, GET_MODE (target),
4118 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4119 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4121 /* If TEMP is already in the desired TARGET, only copy it from
4122 memory and don't store it there again. */
4123 if (temp == target
4124 || (rtx_equal_p (temp, target)
4125 && ! side_effects_p (temp) && ! side_effects_p (target)))
4126 dont_store_target = 1;
4127 temp = copy_to_reg (temp);
4129 dont_return_target = 1;
4131 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4132 /* If this is a scalar in a register that is stored in a wider mode
4133 than the declared mode, compute the result into its declared mode
4134 and then convert to the wider mode. Our value is the computed
4135 expression. */
4137 rtx inner_target = 0;
4139 /* If we don't want a value, we can do the conversion inside EXP,
4140 which will often result in some optimizations. Do the conversion
4141 in two steps: first change the signedness, if needed, then
4142 the extend. But don't do this if the type of EXP is a subtype
4143 of something else since then the conversion might involve
4144 more than just converting modes. */
4145 if ((want_value & 1) == 0
4146 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4147 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4149 if (TREE_UNSIGNED (TREE_TYPE (exp))
4150 != SUBREG_PROMOTED_UNSIGNED_P (target))
4151 exp = convert
4152 ((*lang_hooks.types.signed_or_unsigned_type)
4153 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4155 exp = convert ((*lang_hooks.types.type_for_mode)
4156 (GET_MODE (SUBREG_REG (target)),
4157 SUBREG_PROMOTED_UNSIGNED_P (target),
4158 exp);
4160 inner_target = SUBREG_REG (target);
4163 temp = expand_expr (exp, inner_target, VOIDmode,
4164 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
4166 /* If TEMP is a MEM and we want a result value, make the access
4167 now so it gets done only once. Strictly speaking, this is
4168 only necessary if the MEM is volatile, or if the address
4169 overlaps TARGET. But not performing the load twice also
4170 reduces the amount of rtl we generate and then have to CSE. */
4171 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4172 temp = copy_to_reg (temp);
4174 /* If TEMP is a VOIDmode constant, use convert_modes to make
4175 sure that we properly convert it. */
4176 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4178 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4179 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4180 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4181 GET_MODE (target), temp,
4182 SUBREG_PROMOTED_UNSIGNED_P (target));
4185 convert_move (SUBREG_REG (target), temp,
4186 SUBREG_PROMOTED_UNSIGNED_P (target));
4188 /* If we promoted a constant, change the mode back down to match
4189 target. Otherwise, the caller might get confused by a result whose
4190 mode is larger than expected. */
4192 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
4194 if (GET_MODE (temp) != VOIDmode)
4196 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4197 SUBREG_PROMOTED_VAR_P (temp) = 1;
4198 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4199 SUBREG_PROMOTED_UNSIGNED_P (target));
4201 else
4202 temp = convert_modes (GET_MODE (target),
4203 GET_MODE (SUBREG_REG (target)),
4204 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4207 return want_value & 1 ? temp : NULL_RTX;
4211 temp = expand_expr_real (exp, target, GET_MODE (target),
4212 (want_value & 2
4213 ? EXPAND_STACK_PARM : EXPAND_NORMAL),
4214 &alt_rtl);
4215 /* Return TARGET if it's a specified hardware register.
4216 If TARGET is a volatile mem ref, either return TARGET
4217 or return a reg copied *from* TARGET; ANSI requires this.
4219 Otherwise, if TEMP is not TARGET, return TEMP
4220 if it is constant (for efficiency),
4221 or if we really want the correct value. */
4222 if (!(target && GET_CODE (target) == REG
4223 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4224 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4225 && ! rtx_equal_p (temp, target)
4226 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4227 dont_return_target = 1;
4230 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4231 the same as that of TARGET, adjust the constant. This is needed, for
4232 example, in case it is a CONST_DOUBLE and we want only a word-sized
4233 value. */
4234 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4235 && TREE_CODE (exp) != ERROR_MARK
4236 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4237 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4238 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4240 /* If value was not generated in the target, store it there.
4241 Convert the value to TARGET's type first if necessary.
4242 If TEMP and TARGET compare equal according to rtx_equal_p, but
4243 one or both of them are volatile memory refs, we have to distinguish
4244 two cases:
4245 - expand_expr has used TARGET. In this case, we must not generate
4246 another copy. This can be detected by TARGET being equal according
4247 to ==.
4248 - expand_expr has not used TARGET - that means that the source just
4249 happens to have the same RTX form. Since temp will have been created
4250 by expand_expr, it will compare unequal according to == .
4251 We must generate a copy in this case, to reach the correct number
4252 of volatile memory references. */
4254 if ((! rtx_equal_p (temp, target)
4255 || (temp != target && (side_effects_p (temp)
4256 || side_effects_p (target))))
4257 && TREE_CODE (exp) != ERROR_MARK
4258 && ! dont_store_target
4259 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4260 but TARGET is not valid memory reference, TEMP will differ
4261 from TARGET although it is really the same location. */
4262 && !(alt_rtl && rtx_equal_p (alt_rtl, target))
4263 /* If there's nothing to copy, don't bother. Don't call expr_size
4264 unless necessary, because some front ends' (C++) expr_size hook
4265 aborts on objects that are not supposed to be bit-copied or
4266 bit-initialized. */
4267 && expr_size (exp) != const0_rtx)
4269 target = protect_from_queue (target, 1);
4270 if (GET_MODE (temp) != GET_MODE (target)
4271 && GET_MODE (temp) != VOIDmode)
4273 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4274 if (dont_return_target)
4276 /* In this case, we will return TEMP,
4277 so make sure it has the proper mode.
4278 But don't forget to store the value into TARGET. */
4279 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4280 emit_move_insn (target, temp);
4282 else
4283 convert_move (target, temp, unsignedp);
4286 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4288 /* Handle copying a string constant into an array. The string
4289 constant may be shorter than the array. So copy just the string's
4290 actual length, and clear the rest. First get the size of the data
4291 type of the string, which is actually the size of the target. */
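/* For instance, for

       char buf[8] = "hi";

   the STRING_CST supplies 3 bytes (counting the terminating NUL), so
   the block move below copies those 3 bytes and clear_storage zeros
   the remaining 5.  */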
4292 rtx size = expr_size (exp);
4294 if (GET_CODE (size) == CONST_INT
4295 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4296 emit_block_move (target, temp, size,
4297 (want_value & 2
4298 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4301 /* Compute the size of the data to copy from the string. */
4302 tree copy_size
4303 = size_binop (MIN_EXPR,
4304 make_tree (sizetype, size),
4305 size_int (TREE_STRING_LENGTH (exp)));
4306 rtx copy_size_rtx
4307 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4308 (want_value & 2
4309 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4310 rtx label = 0;
4312 /* Copy that much. */
4313 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4314 TREE_UNSIGNED (sizetype));
4315 emit_block_move (target, temp, copy_size_rtx,
4316 (want_value & 2
4317 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4319 /* Figure out how much is left in TARGET that we have to clear.
4320 Do all calculations in ptr_mode. */
4321 if (GET_CODE (copy_size_rtx) == CONST_INT)
4323 size = plus_constant (size, -INTVAL (copy_size_rtx));
4324 target = adjust_address (target, BLKmode,
4325 INTVAL (copy_size_rtx));
4327 else
4329 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4330 copy_size_rtx, NULL_RTX, 0,
4331 OPTAB_LIB_WIDEN);
4333 #ifdef POINTERS_EXTEND_UNSIGNED
4334 if (GET_MODE (copy_size_rtx) != Pmode)
4335 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4336 TREE_UNSIGNED (sizetype));
4337 #endif
4339 target = offset_address (target, copy_size_rtx,
4340 highest_pow2_factor (copy_size));
4341 label = gen_label_rtx ();
4342 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4343 GET_MODE (size), 0, label);
4346 if (size != const0_rtx)
4347 clear_storage (target, size);
4349 if (label)
4350 emit_label (label);
4353 /* Handle calls that return values in multiple non-contiguous locations.
4354 The Irix 6 ABI has examples of this. */
4355 else if (GET_CODE (target) == PARALLEL)
4356 emit_group_load (target, temp, TREE_TYPE (exp),
4357 int_size_in_bytes (TREE_TYPE (exp)));
4358 else if (GET_MODE (temp) == BLKmode)
4359 emit_block_move (target, temp, expr_size (exp),
4360 (want_value & 2
4361 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4362 else
4363 emit_move_insn (target, temp);
4366 /* If we don't want a value, return NULL_RTX. */
4367 if ((want_value & 1) == 0)
4368 return NULL_RTX;
4370 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4371 ??? The latter test doesn't seem to make sense. */
4372 else if (dont_return_target && GET_CODE (temp) != MEM)
4373 return temp;
4375 /* Return TARGET itself if it is a hard register. */
4376 else if ((want_value & 1) != 0
4377 && GET_MODE (target) != BLKmode
4378 && ! (GET_CODE (target) == REG
4379 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4380 return copy_to_reg (target);
4382 else
4383 return target;
4386 /* Return 1 if EXP just contains zeros. FIXME merge with initializer_zerop. */
4388 static int
4389 is_zeros_p (tree exp)
4391 tree elt;
4393 switch (TREE_CODE (exp))
4395 case CONVERT_EXPR:
4396 case NOP_EXPR:
4397 case NON_LVALUE_EXPR:
4398 case VIEW_CONVERT_EXPR:
4399 return is_zeros_p (TREE_OPERAND (exp, 0));
4401 case INTEGER_CST:
4402 return integer_zerop (exp);
4404 case COMPLEX_CST:
4405 return
4406 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4408 case REAL_CST:
4409 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4411 case VECTOR_CST:
4412 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4413 elt = TREE_CHAIN (elt))
4414 if (!is_zeros_p (TREE_VALUE (elt)))
4415 return 0;
4417 return 1;
4419 case CONSTRUCTOR:
4420 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4421 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4422 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4423 if (! is_zeros_p (TREE_VALUE (elt)))
4424 return 0;
4426 return 1;
4428 default:
4429 return 0;
4433 /* Return 1 if EXP contains mostly (3/4) zeros. */
4435 static int
4436 mostly_zeros_p (tree exp)
4438 if (TREE_CODE (exp) == CONSTRUCTOR)
4440 int elts = 0, zeros = 0;
4441 tree elt = CONSTRUCTOR_ELTS (exp);
4442 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4444 /* If there are no ranges of true bits, it is all zero. */
4445 return elt == NULL_TREE;
4447 for (; elt; elt = TREE_CHAIN (elt))
4449 /* We do not handle the case where the index is a RANGE_EXPR,
4450 so the statistic will be somewhat inaccurate.
4451 We do make a more accurate count in store_constructor itself,
4452 and since this function is only used for nested array elements,
4453 this should be close enough. */
4454 if (mostly_zeros_p (TREE_VALUE (elt)))
4455 zeros++;
4456 elts++;
4459 return 4 * zeros >= 3 * elts;
4462 return is_zeros_p (exp);
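/* For instance, the constructor for

       int a[4] = { 0, 0, 0, 1 };

   lists four elements of which three are zero, so 4 * zeros >= 3 * elts
   holds and callers will prefer to clear the whole object and store
   only the nonzero element.  */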
4465 /* Helper function for store_constructor.
4466 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4467 TYPE is the type of the CONSTRUCTOR, not the element type.
4468 CLEARED is as for store_constructor.
4469 ALIAS_SET is the alias set to use for any stores.
4471 This provides a recursive shortcut back to store_constructor when it isn't
4472 necessary to go through store_field. This is so that we can pass through
4473 the cleared field to let store_constructor know that we may not have to
4474 clear a substructure if the outer structure has already been cleared. */
4476 static void
4477 store_constructor_field (rtx target, unsigned HOST_WIDE_INT bitsize,
4478 HOST_WIDE_INT bitpos, enum machine_mode mode,
4479 tree exp, tree type, int cleared, int alias_set)
4481 if (TREE_CODE (exp) == CONSTRUCTOR
4482 && bitpos % BITS_PER_UNIT == 0
4483 /* If we have a nonzero bitpos for a register target, then we just
4484 let store_field do the bitfield handling. This is unlikely to
4485 generate unnecessary clear instructions anyways. */
4486 && (bitpos == 0 || GET_CODE (target) == MEM))
4488 if (GET_CODE (target) == MEM)
4489 target
4490 = adjust_address (target,
4491 GET_MODE (target) == BLKmode
4492 || 0 != (bitpos
4493 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4494 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4497 /* Update the alias set, if required. */
4498 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4499 && MEM_ALIAS_SET (target) != 0)
4501 target = copy_rtx (target);
4502 set_mem_alias_set (target, alias_set);
4505 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4507 else
4508 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4509 alias_set);
4512 /* Store the value of constructor EXP into the rtx TARGET.
4513 TARGET is either a REG or a MEM; we know it cannot conflict, since
4514 safe_from_p has been called.
4515 CLEARED is true if TARGET is known to have been zero'd.
4516 SIZE is the number of bytes of TARGET we are allowed to modify: this
4517 may not be the same as the size of EXP if we are assigning to a field
4518 which has been packed to exclude padding bits. */
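/* For instance, for

       struct { int a, b, c; } s = { 1 };

   the constructor lists fewer fields than the structure has, so the
   code below clears the whole object first and then stores the 1
   into the first field.  */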
4520 static void
4521 store_constructor (tree exp, rtx target, int cleared, HOST_WIDE_INT size)
4523 tree type = TREE_TYPE (exp);
4524 #ifdef WORD_REGISTER_OPERATIONS
4525 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4526 #endif
4528 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4529 || TREE_CODE (type) == QUAL_UNION_TYPE)
4531 tree elt;
4533 /* If size is zero or the target is already cleared, do nothing. */
4534 if (size == 0 || cleared)
4535 cleared = 1;
4536 /* We either clear the aggregate or indicate the value is dead. */
4537 else if ((TREE_CODE (type) == UNION_TYPE
4538 || TREE_CODE (type) == QUAL_UNION_TYPE)
4539 && ! CONSTRUCTOR_ELTS (exp))
4540 /* If the constructor is empty, clear the union. */
4542 clear_storage (target, expr_size (exp));
4543 cleared = 1;
4546 /* If we are building a static constructor into a register,
4547 set the initial value as zero so we can fold the value into
4548 a constant. But if more than one register is involved,
4549 this probably loses. */
4550 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4551 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4553 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4554 cleared = 1;
4557 /* If the constructor has fewer fields than the structure
4558 or if we are initializing the structure to mostly zeros,
4559 clear the whole structure first. Don't do this if TARGET is a
4560 register whose mode size isn't equal to SIZE since clear_storage
4561 can't handle this case. */
4562 else if (((list_length (CONSTRUCTOR_ELTS (exp)) != fields_length (type))
4563 || mostly_zeros_p (exp))
4564 && (GET_CODE (target) != REG
4565 ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4566 == size)))
4568 rtx xtarget = target;
4570 if (readonly_fields_p (type))
4572 xtarget = copy_rtx (xtarget);
4573 RTX_UNCHANGING_P (xtarget) = 1;
4576 clear_storage (xtarget, GEN_INT (size));
4577 cleared = 1;
4580 if (! cleared)
4581 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4583 /* Store each element of the constructor into
4584 the corresponding field of TARGET. */
4586 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4588 tree field = TREE_PURPOSE (elt);
4589 tree value = TREE_VALUE (elt);
4590 enum machine_mode mode;
4591 HOST_WIDE_INT bitsize;
4592 HOST_WIDE_INT bitpos = 0;
4593 tree offset;
4594 rtx to_rtx = target;
4596 /* Just ignore missing fields.
4597 We cleared the whole structure, above,
4598 if any fields are missing. */
4599 if (field == 0)
4600 continue;
4602 if (cleared && is_zeros_p (value))
4603 continue;
4605 if (host_integerp (DECL_SIZE (field), 1))
4606 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4607 else
4608 bitsize = -1;
4610 mode = DECL_MODE (field);
4611 if (DECL_BIT_FIELD (field))
4612 mode = VOIDmode;
4614 offset = DECL_FIELD_OFFSET (field);
4615 if (host_integerp (offset, 0)
4616 && host_integerp (bit_position (field), 0))
4618 bitpos = int_bit_position (field);
4619 offset = 0;
4621 else
4622 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4624 if (offset)
4626 rtx offset_rtx;
4628 if (CONTAINS_PLACEHOLDER_P (offset))
4629 offset = build (WITH_RECORD_EXPR, sizetype,
4630 offset, make_tree (TREE_TYPE (exp), target));
4632 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4633 if (GET_CODE (to_rtx) != MEM)
4634 abort ();
4636 #ifdef POINTERS_EXTEND_UNSIGNED
4637 if (GET_MODE (offset_rtx) != Pmode)
4638 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4639 #else
4640 if (GET_MODE (offset_rtx) != ptr_mode)
4641 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4642 #endif
4644 to_rtx = offset_address (to_rtx, offset_rtx,
4645 highest_pow2_factor (offset));
4648 /* If the constructor has been cleared, setting RTX_UNCHANGING_P
4649 on the MEM might lead to scheduling the clearing after the
4650 store. */
4651 if (TREE_READONLY (field) && !cleared)
4653 if (GET_CODE (to_rtx) == MEM)
4654 to_rtx = copy_rtx (to_rtx);
4656 RTX_UNCHANGING_P (to_rtx) = 1;
4659 #ifdef WORD_REGISTER_OPERATIONS
4660 /* If this initializes a field that is smaller than a word, at the
4661 start of a word, try to widen it to a full word.
4662 This special case allows us to output C++ member function
4663 initializations in a form that the optimizers can understand. */
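/* For instance, a C++ constructor such as

       struct P { short a; P () : a (1) {} };

   initializes a 16-bit field at bit 0 of a register that holds the
   whole object; widening the INTEGER_CST to BITS_PER_WORD below lets
   later passes see one plain word-mode move.  */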
4664 if (GET_CODE (target) == REG
4665 && bitsize < BITS_PER_WORD
4666 && bitpos % BITS_PER_WORD == 0
4667 && GET_MODE_CLASS (mode) == MODE_INT
4668 && TREE_CODE (value) == INTEGER_CST
4669 && exp_size >= 0
4670 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4672 tree type = TREE_TYPE (value);
4674 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4676 type = (*lang_hooks.types.type_for_size)
4677 (BITS_PER_WORD, TREE_UNSIGNED (type));
4678 value = convert (type, value);
4681 if (BYTES_BIG_ENDIAN)
4682 value
4683 = fold (build (LSHIFT_EXPR, type, value,
4684 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4685 bitsize = BITS_PER_WORD;
4686 mode = word_mode;
4688 #endif
4690 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4691 && DECL_NONADDRESSABLE_P (field))
4693 to_rtx = copy_rtx (to_rtx);
4694 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4697 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4698 value, type, cleared,
4699 get_alias_set (TREE_TYPE (field)));
4702 else if (TREE_CODE (type) == ARRAY_TYPE
4703 || TREE_CODE (type) == VECTOR_TYPE)
4705 tree elt;
4706 int i;
4707 int need_to_clear;
4708 tree domain = TYPE_DOMAIN (type);
4709 tree elttype = TREE_TYPE (type);
4710 int const_bounds_p;
4711 HOST_WIDE_INT minelt = 0;
4712 HOST_WIDE_INT maxelt = 0;
4713 int icode = 0;
4714 rtx *vector = NULL;
4715 unsigned elt_size = 0;
4716 unsigned n_elts = 0;
4718 /* Vectors are like arrays, but the domain is stored via an array
4719 type indirectly. */
4720 if (TREE_CODE (type) == VECTOR_TYPE)
4722 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4723 the same field as TYPE_DOMAIN, we are not guaranteed that
4724 it always will. */
4725 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4726 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4727 if (REG_P (target) && VECTOR_MODE_P (GET_MODE (target)))
4729 enum machine_mode mode = GET_MODE (target);
4731 icode = (int) vec_init_optab->handlers[mode].insn_code;
4732 if (icode != CODE_FOR_nothing)
4736 elt_size = GET_MODE_SIZE (GET_MODE_INNER (mode));
4737 n_elts = (GET_MODE_SIZE (mode) / elt_size);
4738 vector = alloca (n_elts * sizeof (rtx));
4739 for (i = 0; i < n_elts; i++)
4740 vector [i] = CONST0_RTX (GET_MODE_INNER (mode));
4745 const_bounds_p = (TYPE_MIN_VALUE (domain)
4746 && TYPE_MAX_VALUE (domain)
4747 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4748 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4750 /* If we have constant bounds for the range of the type, get them. */
4752 if (const_bounds_p)
4753 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4754 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4757 /* If the constructor has fewer elements than the array,
4758 clear the whole array first. Similarly if this is a
4759 static constructor of a non-BLKmode object. */
4760 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4761 need_to_clear = 1;
4762 else
4764 HOST_WIDE_INT count = 0, zero_count = 0;
4765 need_to_clear = ! const_bounds_p;
4767 /* This loop is a more accurate version of the loop in
4768 mostly_zeros_p (it handles RANGE_EXPR in an index).
4769 It is also needed to check for missing elements. */
4770 for (elt = CONSTRUCTOR_ELTS (exp);
4771 elt != NULL_TREE && ! need_to_clear;
4772 elt = TREE_CHAIN (elt))
4774 tree index = TREE_PURPOSE (elt);
4775 HOST_WIDE_INT this_node_count;
4777 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4779 tree lo_index = TREE_OPERAND (index, 0);
4780 tree hi_index = TREE_OPERAND (index, 1);
4782 if (! host_integerp (lo_index, 1)
4783 || ! host_integerp (hi_index, 1))
4785 need_to_clear = 1;
4786 break;
4789 this_node_count = (tree_low_cst (hi_index, 1)
4790 - tree_low_cst (lo_index, 1) + 1);
4792 else
4793 this_node_count = 1;
4795 count += this_node_count;
4796 if (mostly_zeros_p (TREE_VALUE (elt)))
4797 zero_count += this_node_count;
4800 /* Clear the entire array first if there are any missing elements,
4801 or if the incidence of zero elements is >= 75%. */
4802 if (! need_to_clear
4803 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4804 need_to_clear = 1;
4807 if (need_to_clear && size > 0 && !vector)
4809 if (! cleared)
4811 if (REG_P (target))
4812 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4813 else
4814 clear_storage (target, GEN_INT (size));
4817 cleared = 1;
4818 else if (REG_P (target))
4819 /* Inform later passes that the old value is dead. */
4820 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4822 /* Store each element of the constructor into
4823 the corresponding element of TARGET, determined
4824 by counting the elements. */
4825 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4826 elt;
4827 elt = TREE_CHAIN (elt), i++)
4829 enum machine_mode mode;
4830 HOST_WIDE_INT bitsize;
4831 HOST_WIDE_INT bitpos;
4832 int unsignedp;
4833 tree value = TREE_VALUE (elt);
4834 tree index = TREE_PURPOSE (elt);
4835 rtx xtarget = target;
4837 if (cleared && is_zeros_p (value))
4838 continue;
4840 unsignedp = TREE_UNSIGNED (elttype);
4841 mode = TYPE_MODE (elttype);
4842 if (mode == BLKmode)
4843 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4844 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4845 : -1);
4846 else
4847 bitsize = GET_MODE_BITSIZE (mode);
4849 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4851 tree lo_index = TREE_OPERAND (index, 0);
4852 tree hi_index = TREE_OPERAND (index, 1);
4853 rtx index_r, pos_rtx, loop_end;
4854 struct nesting *loop;
4855 HOST_WIDE_INT lo, hi, count;
4856 tree position;
4861 /* If the range is constant and "small", unroll the loop. */
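/* For instance, the GNU C range designator in

       int a[6] = { [0 ... 5] = 9 };

   arrives here as a RANGE_EXPR index; with constant bounds and a
   small count the element stores are emitted unrolled rather than
   as a runtime loop.  */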
4862 if (const_bounds_p
4863 && host_integerp (lo_index, 0)
4864 && host_integerp (hi_index, 0)
4865 && (lo = tree_low_cst (lo_index, 0),
4866 hi = tree_low_cst (hi_index, 0),
4867 count = hi - lo + 1,
4868 (GET_CODE (target) != MEM
4870 || (host_integerp (TYPE_SIZE (elttype), 1)
4871 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4872 <= 40 * 8)))))
4874 lo -= minelt; hi -= minelt;
4875 for (; lo <= hi; lo++)
4877 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
4879 if (GET_CODE (target) == MEM
4880 && !MEM_KEEP_ALIAS_SET_P (target)
4881 && TREE_CODE (type) == ARRAY_TYPE
4882 && TYPE_NONALIASED_COMPONENT (type))
4884 target = copy_rtx (target);
4885 MEM_KEEP_ALIAS_SET_P (target) = 1;
4888 store_constructor_field
4889 (target, bitsize, bitpos, mode, value, type, cleared,
4890 get_alias_set (elttype));
4893 else
4895 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4896 loop_end = gen_label_rtx ();
4898 unsignedp = TREE_UNSIGNED (domain);
4900 index = build_decl (VAR_DECL, NULL_TREE, domain);
4902 index_r
4903 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4904 &unsignedp, 0));
4905 SET_DECL_RTL (index, index_r);
4906 if (TREE_CODE (value) == SAVE_EXPR
4907 && SAVE_EXPR_RTL (value) == 0)
4909 /* Make sure value gets expanded once before the
4910 loop. */
4911 expand_expr (value, const0_rtx, VOIDmode, 0);
4912 emit_queue ();
4914 store_expr (lo_index, index_r, 0);
4915 loop = expand_start_loop (0);
4917 /* Assign value to element index. */
4918 position
4919 = convert (ssizetype,
4920 fold (build (MINUS_EXPR, TREE_TYPE (index),
4921 index, TYPE_MIN_VALUE (domain))));
4922 position = size_binop (MULT_EXPR, position,
4923 convert (ssizetype,
4924 TYPE_SIZE_UNIT (elttype)));
4926 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
4927 xtarget = offset_address (target, pos_rtx,
4928 highest_pow2_factor (position));
4929 xtarget = adjust_address (xtarget, mode, 0);
4930 if (TREE_CODE (value) == CONSTRUCTOR)
4931 store_constructor (value, xtarget, cleared,
4932 bitsize / BITS_PER_UNIT);
4933 else
4934 store_expr (value, xtarget, 0);
4936 expand_exit_loop_if_false (loop,
4937 build (LT_EXPR, integer_type_node,
4938 index, hi_index));
4940 expand_increment (build (PREINCREMENT_EXPR,
4941 TREE_TYPE (index),
4942 index, integer_one_node), 0, 0);
4943 expand_end_loop ();
4944 emit_label (loop_end);
4947 else if ((index != 0 && ! host_integerp (index, 0))
4948 || ! host_integerp (TYPE_SIZE (elttype), 1))
4950 tree position;
4955 if (index == 0)
4956 index = ssize_int (i);
4958 if (minelt)
4959 index = convert (ssizetype,
4960 fold (build (MINUS_EXPR, index,
4961 TYPE_MIN_VALUE (domain))));
4963 position = size_binop (MULT_EXPR, index,
4964 convert (ssizetype,
4965 TYPE_SIZE_UNIT (elttype)));
4966 xtarget = offset_address (target,
4967 expand_expr (position, 0, VOIDmode, 0),
4968 highest_pow2_factor (position));
4969 xtarget = adjust_address (xtarget, mode, 0);
4970 store_expr (value, xtarget, 0);
4972 else if (vector)
4974 int pos;
4976 if (index != 0)
4977 pos = tree_low_cst (index, 0) - minelt;
4978 else
4979 pos = i;
4980 vector[pos] = expand_expr (value, NULL_RTX, VOIDmode, 0);
4982 else
4984 if (index != 0)
4985 bitpos = ((tree_low_cst (index, 0) - minelt)
4986 * tree_low_cst (TYPE_SIZE (elttype), 1));
4987 else
4988 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4990 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
4991 && TREE_CODE (type) == ARRAY_TYPE
4992 && TYPE_NONALIASED_COMPONENT (type))
4994 target = copy_rtx (target);
4995 MEM_KEEP_ALIAS_SET_P (target) = 1;
4997 store_constructor_field (target, bitsize, bitpos, mode, value,
4998 type, cleared, get_alias_set (elttype));
5002 if (vector)
5003 emit_insn (GEN_FCN (icode) (target,
5004 gen_rtx_PARALLEL (GET_MODE (target),
5005 gen_rtvec_v (n_elts, vector))));
5009 /* Set constructor assignments. */
5010 else if (TREE_CODE (type) == SET_TYPE)
5012 tree elt = CONSTRUCTOR_ELTS (exp);
5013 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5014 tree domain = TYPE_DOMAIN (type);
5015 tree domain_min, domain_max, bitlength;
5017 /* The default implementation strategy is to extract the constant
5018 parts of the constructor, use that to initialize the target,
5019 and then "or" in whatever non-constant ranges we need in addition.
5021 If a large set is all zero or all ones, it is
5022 probably better to set it using memset (if available) or bzero.
5023 Also, if a large set has just a single range, it may also be
5024 better to first clear the whole set (using bzero/memset), and
5025 then set the bits we want. */
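/* For instance, a front end with SET_TYPE (such as GNU Pascal or
   CHILL) might hand us the constant set [1, 3..5] over a domain of
   0..31; bits 1, 3, 4 and 5 give the word 0x3a, which is stored with
   one move, and any non-constant ranges are then or'ed in below.  */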
5027 /* Check for all zeros. */
5028 if (elt == NULL_TREE && size > 0)
5030 if (!cleared)
5031 clear_storage (target, GEN_INT (size));
5032 return;
5035 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5036 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5037 bitlength = size_binop (PLUS_EXPR,
5038 size_diffop (domain_max, domain_min),
5039 ssize_int (1));
5041 nbits = tree_low_cst (bitlength, 1);
5043 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5044 are "complicated" (more than one range), initialize (the
5045 constant parts) by copying from a constant. */
5046 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5047 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5049 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5050 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5051 char *bit_buffer = alloca (nbits);
5052 HOST_WIDE_INT word = 0;
5053 unsigned int bit_pos = 0;
5054 unsigned int ibit = 0;
5055 unsigned int offset = 0; /* In bytes from beginning of set. */
5057 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5058 for (;;)
5060 if (bit_buffer[ibit])
5062 if (BYTES_BIG_ENDIAN)
5063 word |= (1 << (set_word_size - 1 - bit_pos));
5064 else
5065 word |= 1 << bit_pos;
5068 bit_pos++; ibit++;
5069 if (bit_pos >= set_word_size || ibit == nbits)
5071 if (word != 0 || ! cleared)
5073 rtx datum = GEN_INT (word);
5074 rtx to_rtx;
5076 /* The assumption here is that it is safe to use
5077 XEXP if the set is multi-word, but not if
5078 it's single-word. */
5079 if (GET_CODE (target) == MEM)
5080 to_rtx = adjust_address (target, mode, offset);
5081 else if (offset == 0)
5082 to_rtx = target;
5083 else
5084 abort ();
5085 emit_move_insn (to_rtx, datum);
5088 if (ibit == nbits)
5089 break;
5090 word = 0;
5091 bit_pos = 0;
5092 offset += set_word_size / BITS_PER_UNIT;
5096 else if (!cleared)
5097 /* Don't bother clearing storage if the set is all ones. */
5098 if (TREE_CHAIN (elt) != NULL_TREE
5099 || (TREE_PURPOSE (elt) == NULL_TREE
5100 ? nbits != 1
5101 : ( ! host_integerp (TREE_VALUE (elt), 0)
5102 || ! host_integerp (TREE_PURPOSE (elt), 0)
5103 || (tree_low_cst (TREE_VALUE (elt), 0)
5104 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5105 != (HOST_WIDE_INT) nbits))))
5106 clear_storage (target, expr_size (exp));
5108 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5110 /* Start of range of element or NULL. */
5111 tree startbit = TREE_PURPOSE (elt);
5112 /* End of range of element, or element value. */
5113 tree endbit = TREE_VALUE (elt);
5114 HOST_WIDE_INT startb, endb;
5115 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5117 bitlength_rtx = expand_expr (bitlength,
5118 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5120 /* Handle non-range tuple element like [ expr ]. */
5121 if (startbit == NULL_TREE)
5123 startbit = save_expr (endbit);
5124 endbit = startbit;
5127 startbit = convert (sizetype, startbit);
5128 endbit = convert (sizetype, endbit);
5129 if (! integer_zerop (domain_min))
5131 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5132 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5134 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5135 EXPAND_CONST_ADDRESS);
5136 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5137 EXPAND_CONST_ADDRESS);
5139 if (GET_CODE (target) == REG)
5141 targetx
5142 = assign_temp
5143 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5144 (GET_MODE (target), 0),
5145 TYPE_QUAL_CONST)),
5146 0, 1, 1);
5147 emit_move_insn (targetx, target);
5150 else if (GET_CODE (target) == MEM)
5151 targetx = target;
5152 else
5153 abort ();
5155 /* Optimization: If startbit and endbit are constants divisible
5156 by BITS_PER_UNIT, call memset instead. */
5157 if (TARGET_MEM_FUNCTIONS
5158 && TREE_CODE (startbit) == INTEGER_CST
5159 && TREE_CODE (endbit) == INTEGER_CST
5160 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5161 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5163 emit_library_call (memset_libfunc, LCT_NORMAL,
5164 VOIDmode, 3,
5165 plus_constant (XEXP (targetx, 0),
5166 startb / BITS_PER_UNIT),
5167 Pmode,
5168 constm1_rtx, TYPE_MODE (integer_type_node),
5169 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5170 TYPE_MODE (sizetype));
5172 else
5173 emit_library_call (setbits_libfunc, LCT_NORMAL,
5174 VOIDmode, 4, XEXP (targetx, 0),
5175 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5176 startbit_rtx, TYPE_MODE (sizetype),
5177 endbit_rtx, TYPE_MODE (sizetype));
5179 if (GET_CODE (target) == REG)
5180 emit_move_insn (target, targetx);
5184 else
5185 abort ();
5188 /* Store the value of EXP (an expression tree)
5189 into a subfield of TARGET which has mode MODE and occupies
5190 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5191 If MODE is VOIDmode, it means that we are storing into a bit-field.
5193 If VALUE_MODE is VOIDmode, return nothing in particular.
5194 UNSIGNEDP is not used in this case.
5196 Otherwise, return an rtx for the value stored. This rtx
5197 has mode VALUE_MODE if that is convenient to do.
5198 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5200 TYPE is the type of the underlying object,
5202 ALIAS_SET is the alias set for the destination. This value will
5203 (in general) be different from that for TARGET, since TARGET is a
5204 reference to the containing structure. */
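/* For instance, for

       struct { unsigned f : 3; unsigned g : 5; } x;
       x.g = 9;

   store_field is typically reached with BITSIZE 5 and (on a common
   little-endian layout) BITPOS 3, with MODE VOIDmode marking a
   bit-field store, so the value goes through store_bit_field.  */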
5206 static rtx
5207 store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
5208 enum machine_mode mode, tree exp, enum machine_mode value_mode,
5209 int unsignedp, tree type, int alias_set)
5211 HOST_WIDE_INT width_mask = 0;
5213 if (TREE_CODE (exp) == ERROR_MARK)
5214 return const0_rtx;
5216 /* If we have nothing to store, do nothing unless the expression has
5217 side-effects. */
5218 if (bitsize == 0)
5219 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5220 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5221 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5223 /* If we are storing into an unaligned field of an aligned union that is
5224 in a register, we may have the mode of TARGET being an integer mode but
5225 MODE == BLKmode. In that case, get an aligned object whose size and
5226 alignment are the same as TARGET and store TARGET into it (we can avoid
5227 the store if the field being stored is the entire width of TARGET). Then
5228 call ourselves recursively to store the field into a BLKmode version of
5229 that object. Finally, load from the object into TARGET. This is not
5230 very efficient in general, but should only be slightly more expensive
5231 than the otherwise-required unaligned accesses. Perhaps this can be
5232 cleaned up later. It's tempting to make OBJECT readonly, but it's set
5233 twice, once with emit_move_insn and once via store_field. */
5235 if (mode == BLKmode
5236 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5238 rtx object = assign_temp (type, 0, 1, 1);
5239 rtx blk_object = adjust_address (object, BLKmode, 0);
5241 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5242 emit_move_insn (object, target);
5244 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5245 alias_set);
5247 emit_move_insn (target, object);
5249 /* We want to return the BLKmode version of the data. */
5250 return blk_object;
5253 if (GET_CODE (target) == CONCAT)
5255 /* We're storing into a struct containing a single __complex. */
5257 if (bitpos != 0)
5258 abort ();
5259 return store_expr (exp, target, 0);
5262 /* If the structure is in a register or if the component
5263 is a bit field, we cannot use addressing to access it.
5264 Use bit-field techniques or SUBREG to store in it. */
5266 if (mode == VOIDmode
5267 || (mode != BLKmode && ! direct_store[(int) mode]
5268 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5269 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5270 || GET_CODE (target) == REG
5271 || GET_CODE (target) == SUBREG
5272 /* If the field isn't aligned enough to store as an ordinary memref,
5273 store it as a bit field. */
5274 || (mode != BLKmode
5275 && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
5276 || bitpos % GET_MODE_ALIGNMENT (mode))
5277 && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
5278 || (bitpos % BITS_PER_UNIT != 0)))
5279 /* If the RHS and field are a constant size and the size of the
5280 RHS isn't the same size as the bitfield, we must use bitfield
5281 operations. */
5282 || (bitsize >= 0
5283 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5284 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5286 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5288 /* If BITSIZE is narrower than the size of the type of EXP
5289 we will be narrowing TEMP. Normally, what's wanted are the
5290 low-order bits. However, if EXP's type is a record and this is a
5291 big-endian machine, we want the upper BITSIZE bits. */
5292 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5293 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5294 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5295 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5296 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5297 - bitsize),
5298 NULL_RTX, 1);
5300 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5301 MODE. */
5302 if (mode != VOIDmode && mode != BLKmode
5303 && mode != TYPE_MODE (TREE_TYPE (exp)))
5304 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5306 /* If the modes of TARGET and TEMP are both BLKmode, both
5307 must be in memory and BITPOS must be aligned on a byte
5308 boundary. If so, we simply do a block copy. */
5309 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5311 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5312 || bitpos % BITS_PER_UNIT != 0)
5313 abort ();
5315 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5316 emit_block_move (target, temp,
5317 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5318 / BITS_PER_UNIT),
5319 BLOCK_OP_NORMAL);
5321 return value_mode == VOIDmode ? const0_rtx : target;
5324 /* Store the value in the bitfield. */
5325 store_bit_field (target, bitsize, bitpos, mode, temp,
5326 int_size_in_bytes (type));
5328 if (value_mode != VOIDmode)
5330 /* The caller wants an rtx for the value.
5331 If possible, avoid refetching from the bitfield itself. */
5332 if (width_mask != 0
5333 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5335 tree count;
5336 enum machine_mode tmode;
5338 tmode = GET_MODE (temp);
5339 if (tmode == VOIDmode)
5340 tmode = value_mode;
5342 if (unsignedp)
5343 return expand_and (tmode, temp,
5344 gen_int_mode (width_mask, tmode),
5345 NULL_RTX);
5347 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5348 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5349 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5352 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5353 NULL_RTX, value_mode, VOIDmode,
5354 int_size_in_bytes (type));
5356 return const0_rtx;
5358 else
5360 rtx addr = XEXP (target, 0);
5361 rtx to_rtx = target;
5363 /* If a value is wanted, it must be the lhs;
5364 so make the address stable for multiple use. */
5366 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5367 && ! CONSTANT_ADDRESS_P (addr)
5368 /* A frame-pointer reference is already stable. */
5369 && ! (GET_CODE (addr) == PLUS
5370 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5371 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5372 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5373 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5375 /* Now build a reference to just the desired component. */
5377 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5379 if (to_rtx == target)
5380 to_rtx = copy_rtx (to_rtx);
5382 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5383 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5384 set_mem_alias_set (to_rtx, alias_set);
5386 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5390 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5391 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5392 codes and find the ultimate containing object, which we return.
5394 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5395 bit position, and *PUNSIGNEDP to the signedness of the field.
5396 If the position of the field is variable, we store a tree
5397 giving the variable offset (in units) in *POFFSET.
5398 This offset is in addition to the bit position.
5399 If the position is not variable, we store 0 in *POFFSET.
5401 If any of the extraction expressions is volatile,
5402 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5404 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5405 is a mode that can be used to access the field. In that case, *PBITSIZE
5406 is redundant.
5408 If the field describes a variable-sized object, *PMODE is set to
5409 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5410 this case, but the address of the object can be found. */
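/* For instance, for a reference like S.B[3], where member B is an
   array of 32-bit ints starting at byte 4 of S, this function would
   return S with *PBITSIZE 32, *PBITPOS 4 * BITS_PER_UNIT + 3 * 32,
   and *POFFSET 0, since every component of the position is constant.  */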
5412 tree
5413 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
5414 HOST_WIDE_INT *pbitpos, tree *poffset,
5415 enum machine_mode *pmode, int *punsignedp,
5416 int *pvolatilep)
5418 tree size_tree = 0;
5419 enum machine_mode mode = VOIDmode;
5420 tree offset = size_zero_node;
5421 tree bit_offset = bitsize_zero_node;
5422 tree placeholder_ptr = 0;
5423 tree tem;
5425 /* First get the mode, signedness, and size. We do this from just the
5426 outermost expression. */
5427 if (TREE_CODE (exp) == COMPONENT_REF)
5429 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5430 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5431 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5433 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5435 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5437 size_tree = TREE_OPERAND (exp, 1);
5438 *punsignedp = TREE_UNSIGNED (exp);
5440 else
5442 mode = TYPE_MODE (TREE_TYPE (exp));
5443 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5445 if (mode == BLKmode)
5446 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5447 else
5448 *pbitsize = GET_MODE_BITSIZE (mode);
5451 if (size_tree != 0)
5453 if (! host_integerp (size_tree, 1))
5454 mode = BLKmode, *pbitsize = -1;
5455 else
5456 *pbitsize = tree_low_cst (size_tree, 1);
5459 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5460 and find the ultimate containing object. */
5461 while (1)
5463 if (TREE_CODE (exp) == BIT_FIELD_REF)
5464 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5465 else if (TREE_CODE (exp) == COMPONENT_REF)
5467 tree field = TREE_OPERAND (exp, 1);
5468 tree this_offset = DECL_FIELD_OFFSET (field);
5470 /* If this field hasn't been filled in yet, don't go
5471 past it. This should only happen when folding expressions
5472 made during type construction. */
5473 if (this_offset == 0)
5474 break;
5475 else if (CONTAINS_PLACEHOLDER_P (this_offset))
5476 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5478 offset = size_binop (PLUS_EXPR, offset, this_offset);
5479 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5480 DECL_FIELD_BIT_OFFSET (field));
5482 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5485 else if (TREE_CODE (exp) == ARRAY_REF
5486 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5488 tree index = TREE_OPERAND (exp, 1);
5489 tree array = TREE_OPERAND (exp, 0);
5490 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5491 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5492 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5494 /* We assume all arrays have sizes that are a multiple of a byte.
5495 First subtract the lower bound, if any, in the type of the
5496 index, then convert to sizetype and multiply by the size of the
5497 element. */
5498 if (low_bound != 0 && ! integer_zerop (low_bound))
5499 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5500 index, low_bound));
5502 /* If the index has a self-referential type, pass it to a
5503 WITH_RECORD_EXPR; if the component size is, pass our
5504 component to one. */
5505 if (CONTAINS_PLACEHOLDER_P (index))
5506 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5507 if (CONTAINS_PLACEHOLDER_P (unit_size))
5508 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5510 offset = size_binop (PLUS_EXPR, offset,
5511 size_binop (MULT_EXPR,
5512 convert (sizetype, index),
5513 unit_size));
5516 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5518 tree new = find_placeholder (exp, &placeholder_ptr);
5520 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5521 We might have been called from tree optimization where we
5522 haven't set up an object yet. */
5523 if (new == 0)
5524 break;
5525 else
5526 exp = new;
5528 continue;
5531 /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
5532 conversions that don't change the mode, and all view conversions
5533 except those that need to "step up" the alignment. */
5534 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5535 && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
5536 && ! ((TYPE_ALIGN (TREE_TYPE (exp))
5537 > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
5538 && STRICT_ALIGNMENT
5539 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
5540 < BIGGEST_ALIGNMENT)
5541 && (TYPE_ALIGN_OK (TREE_TYPE (exp))
5542 || TYPE_ALIGN_OK (TREE_TYPE
5543 (TREE_OPERAND (exp, 0))))))
5544 && ! ((TREE_CODE (exp) == NOP_EXPR
5545 || TREE_CODE (exp) == CONVERT_EXPR)
5546 && (TYPE_MODE (TREE_TYPE (exp))
5547 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5548 break;
5550 /* If any reference in the chain is volatile, the effect is volatile. */
5551 if (TREE_THIS_VOLATILE (exp))
5552 *pvolatilep = 1;
5554 exp = TREE_OPERAND (exp, 0);
5557 /* If OFFSET is constant, see if we can return the whole thing as a
5558 constant bit position. Otherwise, split it up. */
5559 if (host_integerp (offset, 0)
5560 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5561 bitsize_unit_node))
5562 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5563 && host_integerp (tem, 0))
5564 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5565 else
5566 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5568 return exp;
5572 /* Return 1 if T is an expression that get_inner_reference handles. */
5574 int
5575 handled_component_p (tree t)
5577 switch (TREE_CODE (t))
5579 case COMPONENT_REF:
5580 case BIT_FIELD_REF:
5581 case ARRAY_REF:
5582 case ARRAY_RANGE_REF:
5583 case NON_LVALUE_EXPR:
5584 case VIEW_CONVERT_EXPR:
5585 return 1;
5587 /* ??? Sure they are handled, but get_inner_reference may return
5588 a different PBITSIZE, depending upon whether the expression is
5589 wrapped up in a NOP_EXPR or not, e.g. for bitfields. */
5590 case NOP_EXPR:
5591 case CONVERT_EXPR:
5592 return (TYPE_MODE (TREE_TYPE (t))
5593 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5595 default:
5596 return 0;
5600 /* Given an rtx VALUE that may contain additions and multiplications, return
5601 an equivalent value that just refers to a register, memory, or constant.
5602 This is done by generating instructions to perform the arithmetic and
5603 returning a pseudo-register containing the value.
5605 The returned value may be a REG, SUBREG, MEM or constant. */
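/* For instance, given an address-style value such as

       (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (reg:SI 61))

   force_operand emits the multiply and the add as insns and returns a
   pseudo register holding the sum, so the caller sees a plain
   register operand.  */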
5607 rtx
5608 force_operand (rtx value, rtx target)
5610 rtx op1, op2;
5611 /* Use subtarget as the target for operand 0 of a binary operation. */
5612 rtx subtarget = get_subtarget (target);
5613 enum rtx_code code = GET_CODE (value);
5615 /* Check for a PIC address load. */
5616 if ((code == PLUS || code == MINUS)
5617 && XEXP (value, 0) == pic_offset_table_rtx
5618 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5619 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5620 || GET_CODE (XEXP (value, 1)) == CONST))
5622 if (!subtarget)
5623 subtarget = gen_reg_rtx (GET_MODE (value));
5624 emit_move_insn (subtarget, value);
5625 return subtarget;
5628 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5630 if (!target)
5631 target = gen_reg_rtx (GET_MODE (value));
5632 convert_move (target, force_operand (XEXP (value, 0), NULL),
5633 code == ZERO_EXTEND);
5634 return target;
5637 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5639 op2 = XEXP (value, 1);
5640 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5641 subtarget = 0;
5642 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5644 code = PLUS;
5645 op2 = negate_rtx (GET_MODE (value), op2);
5648 /* Check for an addition with OP2 a constant integer and our first
5649 operand a PLUS of a virtual register and something else. In that
5650 case, we want to emit the sum of the virtual register and the
5651 constant first and then add the other value. This allows virtual
5652 register instantiation to simply modify the constant rather than
5653 creating another one around this addition. */
5654 if (code == PLUS && GET_CODE (op2) == CONST_INT
5655 && GET_CODE (XEXP (value, 0)) == PLUS
5656 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5657 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5658 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5660 rtx temp = expand_simple_binop (GET_MODE (value), code,
5661 XEXP (XEXP (value, 0), 0), op2,
5662 subtarget, 0, OPTAB_LIB_WIDEN);
5663 return expand_simple_binop (GET_MODE (value), code, temp,
5664 force_operand (XEXP (XEXP (value,
5665 0), 1), NULL_RTX),
5666 target, 0, OPTAB_LIB_WIDEN);
5669 op1 = force_operand (XEXP (value, 0), subtarget);
5670 op2 = force_operand (op2, NULL_RTX);
5671 switch (code)
5673 case MULT:
5674 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5675 case DIV:
5676 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5677 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5678 target, 1, OPTAB_LIB_WIDEN);
5679 else
5680 return expand_divmod (0,
5681 FLOAT_MODE_P (GET_MODE (value))
5682 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5683 GET_MODE (value), op1, op2, target, 0);
5685 case MOD:
5686 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5687 target, 0);
5689 case UDIV:
5690 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5691 target, 1);
5693 case UMOD:
5694 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5695 target, 1);
5697 case ASHIFTRT:
5698 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5699 target, 0, OPTAB_LIB_WIDEN);
5701 default:
5702 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5703 target, 1, OPTAB_LIB_WIDEN);
5706 if (GET_RTX_CLASS (code) == '1')
5708 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5709 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5712 #ifdef INSN_SCHEDULING
5713 /* On machines that have insn scheduling, we want all memory references
5714 to be explicit, so we need to deal with such paradoxical SUBREGs. */
5715 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5716 && (GET_MODE_SIZE (GET_MODE (value))
5717 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5718 value
5719 = simplify_gen_subreg (GET_MODE (value),
5720 force_reg (GET_MODE (SUBREG_REG (value)),
5721 force_operand (SUBREG_REG (value),
5722 NULL_RTX)),
5723 GET_MODE (SUBREG_REG (value)),
5724 SUBREG_BYTE (value));
5725 #endif
5727 return value;
5730 /* Subroutine of expand_expr: return nonzero iff there is no way that
5731 EXP can reference X, which is being modified. TOP_P is nonzero if this
5732 call is going to be used to determine whether we need a temporary
5733 for EXP, as opposed to a recursive call to this function.
5735 It is always safe for this routine to return zero since it merely
5736 searches for optimization opportunities. */
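/* For instance, before expanding an initialization like

       a = (struct s) { a.x + 1, 2 };

   a caller can ask safe_from_p whether A's rtx is referenced by the
   constructor; when it may be, the value is built in a temporary
   instead of directly in A.  */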
5738 static int
5739 safe_from_p (rtx x, tree exp, int top_p)
5741 rtx exp_rtl = 0;
5742 int i, nops;
5743 static tree save_expr_list;
5745 if (x == 0
5746 /* If EXP has varying size, we MUST use a target since we currently
5747 have no way of allocating temporaries of variable size
5748 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5749 So we assume here that something at a higher level has prevented a
5750 clash. This is somewhat bogus, but the best we can do. Only
5751 do this when X is BLKmode and when we are at the top level. */
5752 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5753 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5754 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5755 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5756 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5758 && GET_MODE (x) == BLKmode)
5759 /* If X is in the outgoing argument area, it is always safe. */
5760 || (GET_CODE (x) == MEM
5761 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5762 || (GET_CODE (XEXP (x, 0)) == PLUS
5763 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5764 return 1;
5766 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5767 find the underlying pseudo. */
5768 if (GET_CODE (x) == SUBREG)
5770 x = SUBREG_REG (x);
5771 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5772 return 0;
5775 /* A SAVE_EXPR might appear many times in the expression passed to the
5776 top-level safe_from_p call, and if it has a complex subexpression,
5777 examining it multiple times could result in a combinatorial explosion.
5778 E.g. on an Alpha running at least 200MHz, a Fortran testcase compiled
5779 with optimization took about 28 minutes to compile -- even though it was
5780 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5781 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5782 we have processed. Note that the only test of top_p was above. */
5783 if (top_p)
5785 int rtn;
5786 tree t;
5788 save_expr_list = 0;
5791 rtn = safe_from_p (x, exp, 0);
5793 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5794 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5796 return rtn;
5799 /* Now look at our tree code and possibly recurse. */
5800 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5802 case 'd':
5803 exp_rtl = DECL_RTL_IF_SET (exp);
5804 break;
5806 case 'c':
5807 return 1;
5809 case 'x':
5810 if (TREE_CODE (exp) == TREE_LIST)
5814 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
5815 return 0;
5816 exp = TREE_CHAIN (exp);
5817 if (!exp)
5818 return 1;
5819 if (TREE_CODE (exp) != TREE_LIST)
5820 return safe_from_p (x, exp, 0);
5823 else if (TREE_CODE (exp) == ERROR_MARK)
5824 return 1; /* An already-visited SAVE_EXPR? */
5825 else
5826 return 0;
5828 case '2':
5829 case '<':
5830 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
5831 return 0;
5833 /* Fall through. */
5834 case '1':
5835 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5837 case 'e':
5838 case 'r':
5839 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5840 the expression. If it is set, we conflict iff we are that rtx or
5841 both are in memory. Otherwise, we check all operands of the
5842 expression recursively. */
5844 switch (TREE_CODE (exp))
5846 case ADDR_EXPR:
5847 /* If the operand is static or we are static, we can't conflict.
5848 Likewise if we don't conflict with the operand at all. */
5849 if (staticp (TREE_OPERAND (exp, 0))
5850 || TREE_STATIC (exp)
5851 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5852 return 1;
5854 /* Otherwise, the only way this can conflict is if we are taking
5855 the address of a DECL and that address is part of X, which is
5856 very rare. */
5857 exp = TREE_OPERAND (exp, 0);
5858 if (DECL_P (exp))
5860 if (!DECL_RTL_SET_P (exp)
5861 || GET_CODE (DECL_RTL (exp)) != MEM)
5862 return 0;
5863 else
5864 exp_rtl = XEXP (DECL_RTL (exp), 0);
5866 break;
5868 case INDIRECT_REF:
5869 if (GET_CODE (x) == MEM
5870 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5871 get_alias_set (exp)))
5872 return 0;
5873 break;
5875 case CALL_EXPR:
5876 /* Assume that the call will clobber all hard registers and
5877 all of memory. */
5878 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5879 || GET_CODE (x) == MEM)
5880 return 0;
5881 break;
5883 case RTL_EXPR:
5884 /* If a sequence exists, we would have to scan every instruction
5885 in the sequence to see if it was safe. This is probably not
5886 worthwhile. */
5887 if (RTL_EXPR_SEQUENCE (exp))
5888 return 0;
5890 exp_rtl = RTL_EXPR_RTL (exp);
5891 break;
5893 case WITH_CLEANUP_EXPR:
5894 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
5895 break;
5897 case CLEANUP_POINT_EXPR:
5898 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5900 case SAVE_EXPR:
5901 exp_rtl = SAVE_EXPR_RTL (exp);
5902 if (exp_rtl)
5903 break;
5905 /* If we've already scanned this, don't do it again. Otherwise,
5906 show we've scanned it and record for clearing the flag if we're
5907 going on. */
5908 if (TREE_PRIVATE (exp))
5909 return 1;
5911 TREE_PRIVATE (exp) = 1;
5912 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
5914 TREE_PRIVATE (exp) = 0;
5915 return 0;
5918 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
5919 return 1;
5921 case BIND_EXPR:
5922 /* The only operand we look at is operand 1. The rest aren't
5923 part of the expression. */
5924 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
5926 default:
5927 break;
5930 /* If we have an rtx, we do not need to scan our operands. */
5931 if (exp_rtl)
5932 break;
5934 nops = first_rtl_op (TREE_CODE (exp));
5935 for (i = 0; i < nops; i++)
5936 if (TREE_OPERAND (exp, i) != 0
5937 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5938 return 0;
5940 /* If this is a language-specific tree code, it may require
5941 special handling. */
5942 if ((unsigned int) TREE_CODE (exp)
5943 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
5944 && !(*lang_hooks.safe_from_p) (x, exp))
5945 return 0;
5948 /* If we have an rtl, find any enclosed object. Then see if we conflict
5949 with it. */
5950 if (exp_rtl)
5952 if (GET_CODE (exp_rtl) == SUBREG)
5954 exp_rtl = SUBREG_REG (exp_rtl);
5955 if (GET_CODE (exp_rtl) == REG
5956 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5957 return 0;
5960 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5961 are memory and they conflict. */
5962 return ! (rtx_equal_p (x, exp_rtl)
5963 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5964 && true_dependence (exp_rtl, VOIDmode, x,
5965 rtx_addr_varies_p)));
5968 /* If we reach here, it is safe. */
5969 return 1;
5972 /* Subroutine of expand_expr: return rtx if EXP is a
5973 variable or parameter; else return 0. */
5975 static rtx
5976 var_rtx (tree exp)
5978 STRIP_NOPS (exp);
5979 switch (TREE_CODE (exp))
5981 case VAR_DECL:
5982 case PARM_DECL:
5983 return DECL_RTL (exp);
5984 default:
5985 return 0;
5989 /* Return the highest power of two that EXP is known to be a multiple of.
5990 This is used in updating alignment of MEMs in array references. */
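/* For instance, by the rules below, for the tree I * 12 the MULT_EXPR
   case multiplies its operands' factors, 1 * 4 = 4; for (I * 12) + 8
   the PLUS_EXPR case takes MIN (4, 8) = 4, so a MEM addressed by that
   expression can be marked as 4-byte aligned.  */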
5992 static unsigned HOST_WIDE_INT
5993 highest_pow2_factor (tree exp)
5995 unsigned HOST_WIDE_INT c0, c1;
5997 switch (TREE_CODE (exp))
5999 case INTEGER_CST:
6000 /* We can find the lowest bit that's a one. If the low
6001 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6002 We need to handle this case since we can find it in a COND_EXPR,
6003 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6004 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6005 later ICE. */
6006 if (TREE_CONSTANT_OVERFLOW (exp))
6007 return BIGGEST_ALIGNMENT;
6008 else
6010 /* Note: tree_low_cst is intentionally not used here,
6011 we don't care about the upper bits. */
6012 c0 = TREE_INT_CST_LOW (exp);
6013 c0 &= -c0;
6014 return c0 ? c0 : BIGGEST_ALIGNMENT;
6018 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6019 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6020 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6021 return MIN (c0, c1);
6023 case MULT_EXPR:
6024 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6025 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6026 return c0 * c1;
6028 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6029 case CEIL_DIV_EXPR:
6030 if (integer_pow2p (TREE_OPERAND (exp, 1))
6031 && host_integerp (TREE_OPERAND (exp, 1), 1))
6033 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6034 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6035 return MAX (1, c0 / c1);
6037 break;
6039 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6040 case SAVE_EXPR: case WITH_RECORD_EXPR:
6041 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6043 case COMPOUND_EXPR:
6044 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6046 case COND_EXPR:
6047 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6048 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6049 return MIN (c0, c1);
6051 default:
6052 break;
6055 return 1;
6058 /* Similar, except that it is known that the expression must be a multiple
6059 of the alignment of TYPE. */
6061 static unsigned HOST_WIDE_INT
6062 highest_pow2_factor_for_type (tree type, tree exp)
6064 unsigned HOST_WIDE_INT type_align, factor;
6066 factor = highest_pow2_factor (exp);
6067 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6068 return MAX (factor, type_align);
6071 /* Return an object on the placeholder list that matches EXP, a
6072 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6073 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6074 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6075 is a location which initially points to a starting location in the
6076 placeholder list (zero means start of the list) and where a pointer into
6077 the placeholder list at which the object is found is placed. */
6079 tree
6080 find_placeholder (tree exp, tree *plist)
6082 tree type = TREE_TYPE (exp);
6083 tree placeholder_expr;
6085 for (placeholder_expr
6086 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6087 placeholder_expr != 0;
6088 placeholder_expr = TREE_CHAIN (placeholder_expr))
6090 tree need_type = TYPE_MAIN_VARIANT (type);
6093 /* Find the outermost reference that is of the type we want. If none,
6094 see if any object has a type that is a pointer to the type we
6096 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6097 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6098 || TREE_CODE (elt) == COND_EXPR)
6099 ? TREE_OPERAND (elt, 1)
6100 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6101 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6102 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6103 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6104 ? TREE_OPERAND (elt, 0) : 0))
6105 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6108 *plist = placeholder_expr;
6112 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6114 = ((TREE_CODE (elt) == COMPOUND_EXPR
6115 || TREE_CODE (elt) == COND_EXPR)
6116 ? TREE_OPERAND (elt, 1)
6117 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6118 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6119 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6120 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6121 ? TREE_OPERAND (elt, 0) : 0))
6122 if (POINTER_TYPE_P (TREE_TYPE (elt))
6123 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6127 *plist = placeholder_expr;
6128 return build1 (INDIRECT_REF, need_type, elt);
6135 /* Subroutine of expand_expr. Expand the two operands of a binary
6136 expression EXP0 and EXP1 placing the results in OP0 and OP1.
6137 The value may be stored in TARGET if TARGET is nonzero. The
6138 MODIFIER argument is as documented by expand_expr. */
6141 expand_operands (tree exp0, tree exp1, rtx target, rtx *op0, rtx *op1,
6142 enum expand_modifier modifier)
6144 if (! safe_from_p (target, exp1, 1))
6146 if (operand_equal_p (exp0, exp1, 0))
6148 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6149 *op1 = copy_rtx (*op0);
6153 /* If we need to preserve evaluation order, copy exp0 into its own
6154 temporary variable so that it can't be clobbered by exp1. */
6155 if (flag_evaluation_order && TREE_SIDE_EFFECTS (exp1))
6156 exp0 = save_expr (exp0);
6157 *op0 = expand_expr (exp0, target, VOIDmode, modifier);
6158 *op1 = expand_expr (exp1, NULL_RTX, VOIDmode, modifier);
6163 /* expand_expr: generate code for computing expression EXP.
6164 An rtx for the computed value is returned. The value is never null.
6165 In the case of a void EXP, const0_rtx is returned.
6167 The value may be stored in TARGET if TARGET is nonzero.
6168 TARGET is just a suggestion; callers must assume that
6169 the rtx returned may not be the same as TARGET.
6171 If TARGET is CONST0_RTX, it means that the value will be ignored.
6173 If TMODE is not VOIDmode, it suggests generating the
6174 result in mode TMODE. But this is done only when convenient.
6175 Otherwise, TMODE is ignored and the value generated in its natural mode.
6176 TMODE is just a suggestion; callers must assume that
6177 the rtx returned may not have mode TMODE.
6179 Note that TARGET may have neither TMODE nor MODE. In that case, it
6180 probably will not be used.
6182 If MODIFIER is EXPAND_SUM then when EXP is an addition
6183 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6184 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6185 products as above, or REG or MEM, or constant.
6186 Ordinarily in such cases we would output mul or add instructions
6187 and then return a pseudo reg containing the sum.
6189 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6190 it also marks a label as absolutely required (it can't be dead).
6191 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6192 This is used for outputting expressions used in initializers.
6194 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6195 with a constant address even if that address is not normally legitimate.
6196 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6198 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6199 a call parameter. Such targets require special care as we haven't yet
6200 marked TARGET so that it's safe from being trashed by libcalls. We
6201 don't want to use TARGET for anything but the final result;
6202 Intermediate values must go elsewhere. Additionally, calls to
6203 emit_block_move will be flagged with BLOCK_OP_CALL_PARM.
6205 If EXP is a VAR_DECL whose DECL_RTL was a MEM with an invalid
6206 address, and ALT_RTL is non-NULL, then *ALT_RTL is set to the
6207 DECL_RTL of the VAR_DECL. *ALT_RTL is also set if EXP is a
6208 COMPOUND_EXPR whose second argument is such a VAR_DECL, and so on
6212 expand_expr_real (tree exp, rtx target, enum machine_mode tmode,
6213 enum expand_modifier modifier, rtx *alt_rtl)
6216 tree type = TREE_TYPE (exp);
6217 int unsignedp = TREE_UNSIGNED (type);
6218 enum machine_mode mode;
6219 enum tree_code code = TREE_CODE (exp);
6221 rtx subtarget, original_target;
6225 /* Handle ERROR_MARK before anybody tries to access its type. */
6226 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6228 op0 = CONST0_RTX (tmode);
6234 mode = TYPE_MODE (type);
6235 /* Use subtarget as the target for operand 0 of a binary operation. */
6236 subtarget = get_subtarget (target);
6237 original_target = target;
6238 ignore = (target == const0_rtx
6239 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6240 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6241 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6242 && TREE_CODE (type) == VOID_TYPE));
6244 /* If we are going to ignore this result, we need only do something
6245 if there is a side-effect somewhere in the expression. If there
6246 is, short-circuit the most common cases here. Note that we must
6247 not call expand_expr with anything but const0_rtx in case this
6248 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6252 if (! TREE_SIDE_EFFECTS (exp))
6255 /* Ensure we reference a volatile object even if value is ignored, but
6256 don't do this if all we are doing is taking its address. */
6257 if (TREE_THIS_VOLATILE (exp)
6258 && TREE_CODE (exp) != FUNCTION_DECL
6259 && mode != VOIDmode && mode != BLKmode
6260 && modifier != EXPAND_CONST_ADDRESS)
6262 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6263 if (GET_CODE (temp) == MEM)
6264 temp = copy_to_reg (temp);
6268 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6269 || code == INDIRECT_REF || code == BUFFER_REF)
6270 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6273 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6274 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6276 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6277 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6280 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6281 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6282 /* If the second operand has no side effects, just evaluate
6284 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6286 else if (code == BIT_FIELD_REF)
6288 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6289 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6290 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6297 /* If will do cse, generate all results into pseudo registers
6298 since 1) that allows cse to find more things
6299 and 2) otherwise cse could produce an insn the machine
6300 cannot support. An exception is a CONSTRUCTOR into a multi-word
6301 MEM: that's much more likely to be most efficient into the MEM.
6302 Another is a CALL_EXPR which must return in memory. */
6304 if (! cse_not_expected && mode != BLKmode && target
6305 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6306 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6307 && ! (code == CALL_EXPR && aggregate_value_p (exp, exp)))
6314 tree function = decl_function_context (exp);
6315 /* Labels in containing functions, or labels used from initializers,
6317 if (modifier == EXPAND_INITIALIZER
6318 || (function != current_function_decl
6319 && function != inline_function_decl
6321 temp = force_label_rtx (exp);
6323 temp = label_rtx (exp);
6325 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6326 if (function != current_function_decl
6327 && function != inline_function_decl && function != 0)
6328 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6333 if (!DECL_RTL_SET_P (exp))
6335 error ("%Jprior parameter's size depends on '%D'", exp, exp);
6336 return CONST0_RTX (mode);
6339 /* ... fall through ... */
6342 /* If a static var's type was incomplete when the decl was written,
6343 but the type is complete now, lay out the decl now. */
6344 if (DECL_SIZE (exp) == 0
6345 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
6346 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6347 layout_decl (exp, 0);
6349 /* ... fall through ... */
6353 if (DECL_RTL (exp) == 0)
6356 /* Ensure variable marked as used even if it doesn't go through
6357 a parser. If it hasn't be used yet, write out an external
6359 if (! TREE_USED (exp))
6361 assemble_external (exp);
6362 TREE_USED (exp) = 1;
6365 /* Show we haven't gotten RTL for this yet. */
6368 /* Handle variables inherited from containing functions. */
6369 context = decl_function_context (exp);
6371 /* We treat inline_function_decl as an alias for the current function
6372 because that is the inline function whose vars, types, etc.
6373 are being merged into the current function.
6374 See expand_inline_function. */
6376 if (context != 0 && context != current_function_decl
6377 && context != inline_function_decl
6378 /* If var is static, we don't need a static chain to access it. */
6379 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6380 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6384 /* Mark as non-local and addressable. */
6385 DECL_NONLOCAL (exp) = 1;
6386 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6388 (*lang_hooks.mark_addressable) (exp);
6389 if (GET_CODE (DECL_RTL (exp)) != MEM)
6391 addr = XEXP (DECL_RTL (exp), 0);
6392 if (GET_CODE (addr) == MEM)
6394 = replace_equiv_address (addr,
6395 fix_lexical_addr (XEXP (addr, 0), exp));
6397 addr = fix_lexical_addr (addr, exp);
6399 temp = replace_equiv_address (DECL_RTL (exp), addr);
6402 /* This is the case of an array whose size is to be determined
6403 from its initializer, while the initializer is still being parsed.
6406 else if (GET_CODE (DECL_RTL (exp)) == MEM
6407 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6408 temp = validize_mem (DECL_RTL (exp));
6410 /* If DECL_RTL is memory, we are in the normal case and either
6411 the address is not valid or it is not a register and -fforce-addr
6412 is specified, get the address into a register. */
6414 else if (GET_CODE (DECL_RTL (exp)) == MEM
6415 && modifier != EXPAND_CONST_ADDRESS
6416 && modifier != EXPAND_SUM
6417 && modifier != EXPAND_INITIALIZER
6418 && (! memory_address_p (DECL_MODE (exp),
6419 XEXP (DECL_RTL (exp), 0))
6421 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6424 *alt_rtl = DECL_RTL (exp);
6425 temp = replace_equiv_address (DECL_RTL (exp),
6426 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6429 /* If we got something, return it. But first, set the alignment
6430 if the address is a register. */
6433 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6434 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6439 /* If the mode of DECL_RTL does not match that of the decl, it
6440 must be a promoted value. We return a SUBREG of the wanted mode,
6441 but mark it so that we know that it was already extended. */
6443 if (GET_CODE (DECL_RTL (exp)) == REG
6444 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6446 /* Get the signedness used for this variable. Ensure we get the
6447 same mode we got when the variable was declared. */
6448 if (GET_MODE (DECL_RTL (exp))
6449 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6450 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6453 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6454 SUBREG_PROMOTED_VAR_P (temp) = 1;
6455 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6459 return DECL_RTL (exp);
6462 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6463 TREE_INT_CST_HIGH (exp), mode);
6465 /* ??? If overflow is set, fold will have done an incomplete job,
6466 which can result in (plus xx (const_int 0)), which can get
6467 simplified by validate_replace_rtx during virtual register
6468 instantiation, which can result in unrecognizable insns.
6469 Avoid this by forcing all overflows into registers. */
6470 if (TREE_CONSTANT_OVERFLOW (exp)
6471 && modifier != EXPAND_INITIALIZER)
6472 temp = force_reg (mode, temp);
6477 return const_vector_from_tree (exp);
6480 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6483 /* If optimized, generate immediate CONST_DOUBLE
6484 which will be turned into memory by reload if necessary.
6486 We used to force a register so that loop.c could see it. But
6487 this does not allow gen_* patterns to perform optimizations with
6488 the constants. It also produces two insns in cases like "x = 1.0;".
6489 On most machines, floating-point constants are not permitted in
6490 many insns, so we'd end up copying it to a register in any case.
6492 Now, we do the copying in expand_binop, if appropriate. */
6493 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6494 TYPE_MODE (TREE_TYPE (exp)));
6497 /* Handle evaluating a complex constant in a CONCAT target. */
6498 if (original_target && GET_CODE (original_target) == CONCAT)
6500 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6503 rtarg = XEXP (original_target, 0);
6504 itarg = XEXP (original_target, 1);
6506 /* Move the real and imaginary parts separately. */
6507 op0 = expand_expr (TREE_REALPART (exp), rtarg, mode, 0);
6508 op1 = expand_expr (TREE_IMAGPART (exp), itarg, mode, 0);
6511 emit_move_insn (rtarg, op0);
6513 emit_move_insn (itarg, op1);
6515 return original_target;
6518 /* ... fall through ... */
6521 temp = output_constant_def (exp, 1);
6523 /* temp contains a constant address.
6524 On RISC machines where a constant address isn't valid,
6525 make some insns to get that address into a register. */
6526 if (modifier != EXPAND_CONST_ADDRESS
6527 && modifier != EXPAND_INITIALIZER
6528 && modifier != EXPAND_SUM
6529 && (! memory_address_p (mode, XEXP (temp, 0))
6530 || flag_force_addr))
6531 return replace_equiv_address (temp,
6532 copy_rtx (XEXP (temp, 0)));
6535 case EXPR_WITH_FILE_LOCATION:
6538 struct file_stack fs;
6540 fs.location = input_location;
6541 fs.next = expr_wfl_stack;
6542 input_filename = EXPR_WFL_FILENAME (exp);
6543 input_line = EXPR_WFL_LINENO (exp);
6544 expr_wfl_stack = &fs;
6545 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6546 emit_line_note (input_location);
6547 /* Possibly avoid switching back and forth here. */
6548 to_return = expand_expr (EXPR_WFL_NODE (exp),
6549 (ignore ? const0_rtx : target),
6551 if (expr_wfl_stack != &fs)
6553 input_location = fs.location;
6554 expr_wfl_stack = fs.next;
6559 context = decl_function_context (exp);
6561 /* If this SAVE_EXPR was at global context, assume we are an
6562 initialization function and move it into our context. */
6564 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6566 /* We treat inline_function_decl as an alias for the current function
6567 because that is the inline function whose vars, types, etc.
6568 are being merged into the current function.
6569 See expand_inline_function. */
6570 if (context == current_function_decl || context == inline_function_decl)
6573 /* If this is non-local, handle it. */
6576 /* The following call just exists to abort if the context is
6577 not of a containing function. */
6578 find_function_data (context);
6580 temp = SAVE_EXPR_RTL (exp);
6581 if (temp && GET_CODE (temp) == REG)
6583 put_var_into_stack (exp, /*rescan=*/true);
6584 temp = SAVE_EXPR_RTL (exp);
6586 if (temp == 0 || GET_CODE (temp) != MEM)
6589 replace_equiv_address (temp,
6590 fix_lexical_addr (XEXP (temp, 0), exp));
6592 if (SAVE_EXPR_RTL (exp) == 0)
6594 if (mode == VOIDmode)
6597 temp = assign_temp (build_qualified_type (type,
6599 | TYPE_QUAL_CONST)),
6602 SAVE_EXPR_RTL (exp) = temp;
6603 if (!optimize && GET_CODE (temp) == REG)
6604 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6607 /* If the mode of TEMP does not match that of the expression, it
6608 must be a promoted value. We pass store_expr a SUBREG of the
6609 wanted mode but mark it so that we know that it was already
6612 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6614 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6615 promote_mode (type, mode, &unsignedp, 0);
6616 SUBREG_PROMOTED_VAR_P (temp) = 1;
6617 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6620 if (temp == const0_rtx)
6621 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6623 store_expr (TREE_OPERAND (exp, 0), temp,
6624 modifier == EXPAND_STACK_PARM ? 2 : 0);
6626 TREE_USED (exp) = 1;
6629 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6630 must be a promoted value. We return a SUBREG of the wanted mode,
6631 but mark it so that we know that it was already extended. */
6633 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6634 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6636 /* Compute the signedness and make the proper SUBREG. */
6637 promote_mode (type, mode, &unsignedp, 0);
6638 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6639 SUBREG_PROMOTED_VAR_P (temp) = 1;
6640 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6644 return SAVE_EXPR_RTL (exp);
6649 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6650 TREE_OPERAND (exp, 0)
6651 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6655 case PLACEHOLDER_EXPR:
6657 tree old_list = placeholder_list;
6658 tree placeholder_expr = 0;
6660 exp = find_placeholder (exp, &placeholder_expr);
6664 placeholder_list = TREE_CHAIN (placeholder_expr);
6665 temp = expand_expr (exp, original_target, tmode, modifier);
6666 placeholder_list = old_list;
6670 case WITH_RECORD_EXPR:
6671 /* Put the object on the placeholder list, expand our first operand,
6672 and pop the list. */
6673 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6675 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6677 placeholder_list = TREE_CHAIN (placeholder_list);
6681 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6682 expand_goto (TREE_OPERAND (exp, 0));
6684 expand_computed_goto (TREE_OPERAND (exp, 0));
6688 expand_exit_loop_if_false (NULL,
6689 invert_truthvalue (TREE_OPERAND (exp, 0)));
6692 case LABELED_BLOCK_EXPR:
6693 if (LABELED_BLOCK_BODY (exp))
6694 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6695 /* Should perhaps use expand_label, but this is simpler and safer. */
6696 do_pending_stack_adjust ();
6697 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6700 case EXIT_BLOCK_EXPR:
6701 if (EXIT_BLOCK_RETURN (exp))
6702 sorry ("returned value in block_exit_expr");
6703 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6708 expand_start_loop (1);
6709 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6717 tree vars = TREE_OPERAND (exp, 0);
6719 /* Need to open a binding contour here because
6720 if there are any cleanups they must be contained here. */
6721 expand_start_bindings (2);
6723 /* Mark the corresponding BLOCK for output in its proper place. */
6724 if (TREE_OPERAND (exp, 2) != 0
6725 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6726 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6728 /* If VARS have not yet been expanded, expand them now. */
6731 if (!DECL_RTL_SET_P (vars))
6733 expand_decl_init (vars);
6734 vars = TREE_CHAIN (vars);
6737 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6739 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6745 if (RTL_EXPR_SEQUENCE (exp))
6747 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6749 emit_insn (RTL_EXPR_SEQUENCE (exp));
6750 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6752 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6753 free_temps_for_rtl_expr (exp);
6755 *alt_rtl = RTL_EXPR_ALT_RTL (exp);
6756 return RTL_EXPR_RTL (exp);
6759 /* If we don't need the result, just ensure we evaluate any
6765 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6766 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6771 /* All elts simple constants => refer to a constant in memory. But
6772 if this is a non-BLKmode mode, let it store a field at a time
6773 since that should make a CONST_INT or CONST_DOUBLE when we
6774 fold. Likewise, if we have a target we can use, it is best to
6775 store directly into the target unless the type is large enough
6776 that memcpy will be used. If we are making an initializer and
6777 all operands are constant, put it in memory as well.
6779 FIXME: Avoid trying to fill vector constructors piece-meal.
6780 Output them with output_constant_def below unless we're sure
6781 they're zeros. This should go away when vector initializers
6782 are treated like VECTOR_CST instead of arrays.
6784 else if ((TREE_STATIC (exp)
6785 && ((mode == BLKmode
6786 && ! (target != 0 && safe_from_p (target, exp, 1)))
6787 || TREE_ADDRESSABLE (exp)
6788 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
6789 && (! MOVE_BY_PIECES_P
6790 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6792 && ((TREE_CODE (type) == VECTOR_TYPE
6793 && !is_zeros_p (exp))
6794 || ! mostly_zeros_p (exp)))))
6795 || ((modifier == EXPAND_INITIALIZER
6796 || modifier == EXPAND_CONST_ADDRESS)
6797 && TREE_CONSTANT (exp)))
6799 rtx constructor = output_constant_def (exp, 1);
6801 if (modifier != EXPAND_CONST_ADDRESS
6802 && modifier != EXPAND_INITIALIZER
6803 && modifier != EXPAND_SUM)
6804 constructor = validize_mem (constructor);
6810 /* Handle calls that pass values in multiple non-contiguous
6811 locations. The Irix 6 ABI has examples of this. */
6812 if (target == 0 || ! safe_from_p (target, exp, 1)
6813 || GET_CODE (target) == PARALLEL
6814 || modifier == EXPAND_STACK_PARM)
6816 = assign_temp (build_qualified_type (type,
6818 | (TREE_READONLY (exp)
6819 * TYPE_QUAL_CONST))),
6820 0, TREE_ADDRESSABLE (exp), 1);
6822 store_constructor (exp, target, 0, int_expr_size (exp));
6828 tree exp1 = TREE_OPERAND (exp, 0);
6830 tree string = string_constant (exp1, &index);
6832 /* Try to optimize reads from const strings. */
6834 && TREE_CODE (string) == STRING_CST
6835 && TREE_CODE (index) == INTEGER_CST
6836 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
6837 && GET_MODE_CLASS (mode) == MODE_INT
6838 && GET_MODE_SIZE (mode) == 1
6839 && modifier != EXPAND_WRITE)
6840 return gen_int_mode (TREE_STRING_POINTER (string)
6841 [TREE_INT_CST_LOW (index)], mode);
6843 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6844 op0 = memory_address (mode, op0);
6845 temp = gen_rtx_MEM (mode, op0);
6846 set_mem_attributes (temp, exp, 0);
6848 /* If we are writing to this object and its type is a record with
6849 readonly fields, we must mark it as readonly so it will
6850 conflict with readonly references to those fields. */
6851 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
6852 RTX_UNCHANGING_P (temp) = 1;
6858 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6862 tree array = TREE_OPERAND (exp, 0);
6863 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6864 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6865 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
6868 /* Optimize the special-case of a zero lower bound.
6870 We convert the low_bound to sizetype to avoid some problems
6871 with constant folding. (E.g. suppose the lower bound is 1,
6872 and its mode is QI. Without the conversion, (ARRAY
6873 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6874 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
6876 if (! integer_zerop (low_bound))
6877 index = size_diffop (index, convert (sizetype, low_bound));
6879 /* Fold an expression like: "foo"[2].
6880 This is not done in fold so it won't happen inside &.
6881 Don't fold if this is for wide characters since it's too
6882 difficult to do correctly and this is a very rare case. */
6884 if (modifier != EXPAND_CONST_ADDRESS
6885 && modifier != EXPAND_INITIALIZER
6886 && modifier != EXPAND_MEMORY
6887 && TREE_CODE (array) == STRING_CST
6888 && TREE_CODE (index) == INTEGER_CST
6889 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
6890 && GET_MODE_CLASS (mode) == MODE_INT
6891 && GET_MODE_SIZE (mode) == 1)
6892 return gen_int_mode (TREE_STRING_POINTER (array)
6893 [TREE_INT_CST_LOW (index)], mode);
6895 /* If this is a constant index into a constant array,
6896 just get the value from the array. Handle both the cases when
6897 we have an explicit constructor and when our operand is a variable
6898 that was declared const. */
6900 if (modifier != EXPAND_CONST_ADDRESS
6901 && modifier != EXPAND_INITIALIZER
6902 && modifier != EXPAND_MEMORY
6903 && TREE_CODE (array) == CONSTRUCTOR
6904 && ! TREE_SIDE_EFFECTS (array)
6905 && TREE_CODE (index) == INTEGER_CST
6906 && 0 > compare_tree_int (index,
6907 list_length (CONSTRUCTOR_ELTS
6908 (TREE_OPERAND (exp, 0)))))
6912 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6913 i = TREE_INT_CST_LOW (index);
6914 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6918 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
6922 else if (optimize >= 1
6923 && modifier != EXPAND_CONST_ADDRESS
6924 && modifier != EXPAND_INITIALIZER
6925 && modifier != EXPAND_MEMORY
6926 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6927 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6928 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK
6929 && targetm.binds_local_p (array))
6931 if (TREE_CODE (index) == INTEGER_CST)
6933 tree init = DECL_INITIAL (array);
6935 if (TREE_CODE (init) == CONSTRUCTOR)
6939 for (elem = CONSTRUCTOR_ELTS (init);
6941 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
6942 elem = TREE_CHAIN (elem))
6945 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
6946 return expand_expr (fold (TREE_VALUE (elem)), target,
6949 else if (TREE_CODE (init) == STRING_CST
6950 && 0 > compare_tree_int (index,
6951 TREE_STRING_LENGTH (init)))
6953 tree type = TREE_TYPE (TREE_TYPE (init));
6954 enum machine_mode mode = TYPE_MODE (type);
6956 if (GET_MODE_CLASS (mode) == MODE_INT
6957 && GET_MODE_SIZE (mode) == 1)
6958 return gen_int_mode (TREE_STRING_POINTER (init)
6959 [TREE_INT_CST_LOW (index)], mode);
6964 goto normal_inner_ref;
6967 /* If the operand is a CONSTRUCTOR, we can just extract the
6968 appropriate field if it is present. */
6969 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
6973 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6974 elt = TREE_CHAIN (elt))
6975 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6976 /* We can normally use the value of the field in the
6977 CONSTRUCTOR. However, if this is a bitfield in
6978 an integral mode that we can fit in a HOST_WIDE_INT,
6979 we must mask only the number of bits in the bitfield,
6980 since this is done implicitly by the constructor. If
6981 the bitfield does not meet either of those conditions,
6982 we can't do this optimization. */
6983 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6984 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6986 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6987 <= HOST_BITS_PER_WIDE_INT))))
6989 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
6990 && modifier == EXPAND_STACK_PARM)
6992 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6993 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6995 HOST_WIDE_INT bitsize
6996 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
6997 enum machine_mode imode
6998 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7000 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7002 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7003 op0 = expand_and (imode, op0, op1, target);
7008 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7011 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7013 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7021 goto normal_inner_ref;
7024 case ARRAY_RANGE_REF:
7027 enum machine_mode mode1;
7028 HOST_WIDE_INT bitsize, bitpos;
7031 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7032 &mode1, &unsignedp, &volatilep);
7035 /* If we got back the original object, something is wrong. Perhaps
7036 we are evaluating an expression too early. In any event, don't
7037 infinitely recurse. */
7041 /* If TEM's type is a union of variable size, pass TARGET to the inner
7042 computation, since it will need a temporary and TARGET is known
7043 to have to do. This occurs in unchecked conversion in Ada. */
7047 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7048 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7050 && modifier != EXPAND_STACK_PARM
7051 ? target : NULL_RTX),
7053 (modifier == EXPAND_INITIALIZER
7054 || modifier == EXPAND_CONST_ADDRESS
7055 || modifier == EXPAND_STACK_PARM)
7056 ? modifier : EXPAND_NORMAL);
7058 /* If this is a constant, put it into a register if it is a
7059 legitimate constant and OFFSET is 0 and memory if it isn't. */
7060 if (CONSTANT_P (op0))
7062 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7063 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7065 op0 = force_reg (mode, op0);
7067 op0 = validize_mem (force_const_mem (mode, op0));
7070 /* Otherwise, if this object not in memory and we either have an
7071 offset or a BLKmode result, put it there. This case can't occur in
7072 C, but can in Ada if we have unchecked conversion of an expression
7073 from a scalar type to an array or record type or for an
7074 ARRAY_RANGE_REF whose type is BLKmode. */
7075 else if (GET_CODE (op0) != MEM
7077 || (code == ARRAY_RANGE_REF && mode == BLKmode)))
7079 /* If the operand is a SAVE_EXPR, we can deal with this by
7080 forcing the SAVE_EXPR into memory. */
7081 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7083 put_var_into_stack (TREE_OPERAND (exp, 0),
7085 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7090 = build_qualified_type (TREE_TYPE (tem),
7091 (TYPE_QUALS (TREE_TYPE (tem))
7092 | TYPE_QUAL_CONST));
7093 rtx memloc = assign_temp (nt, 1, 1, 1);
7095 emit_move_insn (memloc, op0);
7102 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7105 if (GET_CODE (op0) != MEM)
7108 #ifdef POINTERS_EXTEND_UNSIGNED
7109 if (GET_MODE (offset_rtx) != Pmode)
7110 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7112 if (GET_MODE (offset_rtx) != ptr_mode)
7113 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7116 if (GET_MODE (op0) == BLKmode
7117 /* A constant address in OP0 can have VOIDmode, we must
7118 not try to call force_reg in that case. */
7119 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7121 && (bitpos % bitsize) == 0
7122 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7123 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7125 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7129 op0 = offset_address (op0, offset_rtx,
7130 highest_pow2_factor (offset));
7133 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7134 record its alignment as BIGGEST_ALIGNMENT. */
7135 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7136 && is_aligning_offset (offset, tem))
7137 set_mem_align (op0, BIGGEST_ALIGNMENT);
7139 /* Don't forget about volatility even if this is a bitfield. */
7140 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7142 if (op0 == orig_op0)
7143 op0 = copy_rtx (op0);
7145 MEM_VOLATILE_P (op0) = 1;
7148 /* The following code doesn't handle CONCAT.
7149 Assume only bitpos == 0 can be used for CONCAT, due to
7150 one element arrays having the same mode as its element. */
7151 if (GET_CODE (op0) == CONCAT)
7153 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7158 /* In cases where an aligned union has an unaligned object
7159 as a field, we might be extracting a BLKmode value from
7160 an integer-mode (e.g., SImode) object. Handle this case
7161 by doing the extract into an object as wide as the field
7162 (which we know to be the width of a basic mode), then
7163 storing into memory, and changing the mode to BLKmode. */
7164 if (mode1 == VOIDmode
7165 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7166 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7167 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7168 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7169 && modifier != EXPAND_CONST_ADDRESS
7170 && modifier != EXPAND_INITIALIZER)
7171 /* If the field isn't aligned enough to fetch as a memref,
7172 fetch it as a bit field. */
7173 || (mode1 != BLKmode
7174 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7175 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)
7176 || (GET_CODE (op0) == MEM
7177 && (MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (mode1)
7178 || (bitpos % GET_MODE_ALIGNMENT (mode1) != 0))))
7179 && ((modifier == EXPAND_CONST_ADDRESS
7180 || modifier == EXPAND_INITIALIZER)
7182 : SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))))
7183 || (bitpos % BITS_PER_UNIT != 0)))
7184 /* If the type and the field are a constant size and the
7185 size of the type isn't the same size as the bitfield,
7186 we must use bitfield operations. */
7188 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7190 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7193 enum machine_mode ext_mode = mode;
7195 if (ext_mode == BLKmode
7196 && ! (target != 0 && GET_CODE (op0) == MEM
7197 && GET_CODE (target) == MEM
7198 && bitpos % BITS_PER_UNIT == 0))
7199 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7201 if (ext_mode == BLKmode)
7204 target = assign_temp (type, 0, 1, 1);
7209 /* In this case, BITPOS must start at a byte boundary and
7210 TARGET, if specified, must be a MEM. */
7211 if (GET_CODE (op0) != MEM
7212 || (target != 0 && GET_CODE (target) != MEM)
7213 || bitpos % BITS_PER_UNIT != 0)
7216 emit_block_move (target,
7217 adjust_address (op0, VOIDmode,
7218 bitpos / BITS_PER_UNIT),
7219 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7221 (modifier == EXPAND_STACK_PARM
7222 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7227 op0 = validize_mem (op0);
7229 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7230 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7232 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7233 (modifier == EXPAND_STACK_PARM
7234 ? NULL_RTX : target),
7236 int_size_in_bytes (TREE_TYPE (tem)));
7238 /* If the result is a record type and BITSIZE is narrower than
7239 the mode of OP0, an integral mode, and this is a big endian
7240 machine, we must put the field into the high-order bits. */
7241 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7242 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7243 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7244 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7245 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7249 if (mode == BLKmode)
7251 rtx new = assign_temp (build_qualified_type
7252 ((*lang_hooks.types.type_for_mode)
7254 TYPE_QUAL_CONST), 0, 1, 1);
7256 emit_move_insn (new, op0);
7257 op0 = copy_rtx (new);
7258 PUT_MODE (op0, BLKmode);
7259 set_mem_attributes (op0, exp, 1);
7265 /* If the result is BLKmode, use that to access the object
7267 if (mode == BLKmode)
7270 /* Get a reference to just this component. */
7271 if (modifier == EXPAND_CONST_ADDRESS
7272 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7273 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7275 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7277 if (op0 == orig_op0)
7278 op0 = copy_rtx (op0);
7280 set_mem_attributes (op0, exp, 0);
7281 if (GET_CODE (XEXP (op0, 0)) == REG)
7282 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7284 MEM_VOLATILE_P (op0) |= volatilep;
7285 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7286 || modifier == EXPAND_CONST_ADDRESS
7287 || modifier == EXPAND_INITIALIZER)
7289 else if (target == 0)
7290 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7292 convert_move (target, op0, unsignedp);
7298 rtx insn, before = get_last_insn (), vtbl_ref;
7300 /* Evaluate the interior expression. */
7301 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7304 /* Get or create an instruction off which to hang a note. */
7305 if (REG_P (subtarget))
7308 insn = get_last_insn ();
7311 if (! INSN_P (insn))
7312 insn = prev_nonnote_insn (insn);
7316 target = gen_reg_rtx (GET_MODE (subtarget));
7317 insn = emit_move_insn (target, subtarget);
7320 /* Collect the data for the note. */
7321 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7322 vtbl_ref = plus_constant (vtbl_ref,
7323 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7324 /* Discard the initial CONST that was added. */
7325 vtbl_ref = XEXP (vtbl_ref, 0);
7328 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7333 /* Intended for a reference to a buffer of a file-object in Pascal.
7334 But it's not certain that a special tree code will really be
7335 necessary for these. INDIRECT_REF might work for them. */
7341 /* Pascal set IN expression.
7344 rlo = set_low - (set_low%bits_per_word);
7345 the_word = set [ (index - rlo)/bits_per_word ];
7346 bit_index = index % bits_per_word;
7347 bitmask = 1 << bit_index;
7348 return !!(the_word & bitmask); */
7350 tree set = TREE_OPERAND (exp, 0);
7351 tree index = TREE_OPERAND (exp, 1);
7352 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7353 tree set_type = TREE_TYPE (set);
7354 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7355 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7356 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7357 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7358 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7359 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7360 rtx setaddr = XEXP (setval, 0);
7361 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7363 rtx diff, quo, rem, addr, bit, result;
7365 /* If domain is empty, answer is no. Likewise if index is constant
7366 and out of bounds. */
7367 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7368 && TREE_CODE (set_low_bound) == INTEGER_CST
7369 && tree_int_cst_lt (set_high_bound, set_low_bound))
7370 || (TREE_CODE (index) == INTEGER_CST
7371 && TREE_CODE (set_low_bound) == INTEGER_CST
7372 && tree_int_cst_lt (index, set_low_bound))
7373 || (TREE_CODE (set_high_bound) == INTEGER_CST
7374 && TREE_CODE (index) == INTEGER_CST
7375 && tree_int_cst_lt (set_high_bound, index))))
7379 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7381 /* If we get here, we have to generate the code for both cases
7382 (in range and out of range). */
7384 op0 = gen_label_rtx ();
7385 op1 = gen_label_rtx ();
7387 if (! (GET_CODE (index_val) == CONST_INT
7388 && GET_CODE (lo_r) == CONST_INT))
7389 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7390 GET_MODE (index_val), iunsignedp, op1);
7392 if (! (GET_CODE (index_val) == CONST_INT
7393 && GET_CODE (hi_r) == CONST_INT))
7394 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7395 GET_MODE (index_val), iunsignedp, op1);
7397 /* Calculate the element number of bit zero in the first word
7399 if (GET_CODE (lo_r) == CONST_INT)
7400 rlow = GEN_INT (INTVAL (lo_r)
7401 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7403 rlow = expand_binop (index_mode, and_optab, lo_r,
7404 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7405 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7407 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7408 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7410 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7411 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7412 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7413 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7415 addr = memory_address (byte_mode,
7416 expand_binop (index_mode, add_optab, diff,
7417 setaddr, NULL_RTX, iunsignedp,
7420 /* Extract the bit we want to examine. */
7421 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7422 gen_rtx_MEM (byte_mode, addr),
7423 make_tree (TREE_TYPE (index), rem),
7425 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7426 GET_MODE (target) == byte_mode ? target : 0,
7427 1, OPTAB_LIB_WIDEN);
7429 if (result != target)
7430 convert_move (target, result, 1);
7432 /* Output the code to handle the out-of-range case. */
7435 emit_move_insn (target, const0_rtx);
7440 case WITH_CLEANUP_EXPR:
7441 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7443 WITH_CLEANUP_EXPR_RTL (exp)
7444 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7445 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7446 CLEANUP_EH_ONLY (exp));
7448 /* That's it for this cleanup. */
7449 TREE_OPERAND (exp, 1) = 0;
7451 return WITH_CLEANUP_EXPR_RTL (exp);
7453 case CLEANUP_POINT_EXPR:
7455 /* Start a new binding layer that will keep track of all cleanup
7456 actions to be performed. */
7457 expand_start_bindings (2);
7459 target_temp_slot_level = temp_slot_level;
7461 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7462 /* If we're going to use this value, load it up now. */
7464 op0 = force_not_mem (op0);
7465 preserve_temp_slots (op0);
7466 expand_end_bindings (NULL_TREE, 0, 0);
7471 /* Check for a built-in function. */
7472 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7473 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7475 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7477 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7478 == BUILT_IN_FRONTEND)
7479 return (*lang_hooks.expand_expr) (exp, original_target,
7483 return expand_builtin (exp, target, subtarget, tmode, ignore);
7486 return expand_call (exp, target, ignore);
7488 case NON_LVALUE_EXPR:
7491 case REFERENCE_EXPR:
7492 if (TREE_OPERAND (exp, 0) == error_mark_node)
7495 if (TREE_CODE (type) == UNION_TYPE)
7497 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7499 /* If both input and output are BLKmode, this conversion isn't doing
7500 anything except possibly changing memory attribute. */
7501 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7503 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7506 result = copy_rtx (result);
7507 set_mem_attributes (result, exp, 0);
7513 if (TYPE_MODE (type) != BLKmode)
7514 target = gen_reg_rtx (TYPE_MODE (type));
7516 target = assign_temp (type, 0, 1, 1);
7519 if (GET_CODE (target) == MEM)
7520 /* Store data into beginning of memory target. */
7521 store_expr (TREE_OPERAND (exp, 0),
7522 adjust_address (target, TYPE_MODE (valtype), 0),
7523 modifier == EXPAND_STACK_PARM ? 2 : 0);
7525 else if (GET_CODE (target) == REG)
7526 /* Store this field into a union of the proper type. */
7527 store_field (target,
7528 MIN ((int_size_in_bytes (TREE_TYPE
7529 (TREE_OPERAND (exp, 0)))
7531 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7532 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7533 VOIDmode, 0, type, 0);
7537 /* Return the entire union. */
7541 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7543 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7546 /* If the signedness of the conversion differs and OP0 is
7547 a promoted SUBREG, clear that indication since we now
7548 have to do the proper extension. */
7549 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7550 && GET_CODE (op0) == SUBREG)
7551 SUBREG_PROMOTED_VAR_P (op0) = 0;
7556 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7557 if (GET_MODE (op0) == mode)
7560 /* If OP0 is a constant, just convert it into the proper mode. */
7561 if (CONSTANT_P (op0))
7563 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7564 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7566 if (modifier == EXPAND_INITIALIZER)
7567 return simplify_gen_subreg (mode, op0, inner_mode,
7568 subreg_lowpart_offset (mode,
7571 return convert_modes (mode, inner_mode, op0,
7572 TREE_UNSIGNED (inner_type));
7575 if (modifier == EXPAND_INITIALIZER)
7576 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7580 convert_to_mode (mode, op0,
7581 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7583 convert_move (target, op0,
7584 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7587 case VIEW_CONVERT_EXPR:
7588 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7590 /* If the input and output modes are both the same, we are done.
7591 Otherwise, if neither mode is BLKmode and both are integral and within
7592 a word, we can use gen_lowpart. If neither is true, make sure the
7593 operand is in memory and convert the MEM to the new mode. */
7594 if (TYPE_MODE (type) == GET_MODE (op0))
7596 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7597 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7598 && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
7599 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7600 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7601 op0 = gen_lowpart (TYPE_MODE (type), op0);
7602 else if (GET_CODE (op0) != MEM)
7604 /* If the operand is not a MEM, force it into memory. Since we
7605 are going to be be changing the mode of the MEM, don't call
7606 force_const_mem for constants because we don't allow pool
7607 constants to change mode. */
7608 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7610 if (TREE_ADDRESSABLE (exp))
7613 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7615 = assign_stack_temp_for_type
7616 (TYPE_MODE (inner_type),
7617 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7619 emit_move_insn (target, op0);
7623 /* At this point, OP0 is in the correct mode. If the output type is such
7624 that the operand is known to be aligned, indicate that it is.
7625 Otherwise, we need only be concerned about alignment for non-BLKmode
7627 if (GET_CODE (op0) == MEM)
7629 op0 = copy_rtx (op0);
7631 if (TYPE_ALIGN_OK (type))
7632 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7633 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7634 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7636 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7637 HOST_WIDE_INT temp_size
7638 = MAX (int_size_in_bytes (inner_type),
7639 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7640 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7641 temp_size, 0, type);
7642 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7644 if (TREE_ADDRESSABLE (exp))
7647 if (GET_MODE (op0) == BLKmode)
7648 emit_block_move (new_with_op0_mode, op0,
7649 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7650 (modifier == EXPAND_STACK_PARM
7651 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7653 emit_move_insn (new_with_op0_mode, op0);
7658 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7664 this_optab = ! unsignedp && flag_trapv
7665 && (GET_MODE_CLASS (mode) == MODE_INT)
7666 ? addv_optab : add_optab;
7668 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7669 something else, make sure we add the register to the constant and
7670 then to the other thing. This case can occur during strength
7671 reduction and doing it this way will produce better code if the
7672 frame pointer or argument pointer is eliminated.
7674 fold-const.c will ensure that the constant is always in the inner
7675 PLUS_EXPR, so the only case we need to do anything about is if
7676 sp, ap, or fp is our second argument, in which case we must swap
7677 the innermost first argument and our second argument. */
7679 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7680 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7681 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7682 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7683 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7684 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7686 tree t = TREE_OPERAND (exp, 1);
7688 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7689 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7692 /* If the result is to be ptr_mode and we are adding an integer to
7693 something, we might be forming a constant. So try to use
7694 plus_constant. If it produces a sum and we can't accept it,
7695 use force_operand. This allows P = &ARR[const] to generate
7696 efficient code on machines where a SYMBOL_REF is not a valid
7699 If this is an EXPAND_SUM call, always return the sum. */
7700 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7701 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7703 if (modifier == EXPAND_STACK_PARM)
7705 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7706 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7707 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7711 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7713 /* Use immed_double_const to ensure that the constant is
7714 truncated according to the mode of OP1, then sign extended
7715 to a HOST_WIDE_INT. Using the constant directly can result
7716 in non-canonical RTL in a 64x32 cross compile. */
7718 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7720 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7721 op1 = plus_constant (op1, INTVAL (constant_part));
7722 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7723 op1 = force_operand (op1, target);
7727 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7728 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7729 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7733 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7734 (modifier == EXPAND_INITIALIZER
7735 ? EXPAND_INITIALIZER : EXPAND_SUM));
7736 if (! CONSTANT_P (op0))
7738 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7739 VOIDmode, modifier);
7740 /* Return a PLUS if modifier says it's OK. */
7741 if (modifier == EXPAND_SUM
7742 || modifier == EXPAND_INITIALIZER)
7743 return simplify_gen_binary (PLUS, mode, op0, op1);
7746 /* Use immed_double_const to ensure that the constant is
7747 truncated according to the mode of OP1, then sign extended
7748 to a HOST_WIDE_INT. Using the constant directly can result
7749 in non-canonical RTL in a 64x32 cross compile. */
7751 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7753 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7754 op0 = plus_constant (op0, INTVAL (constant_part));
7755 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7756 op0 = force_operand (op0, target);
7761 /* No sense saving up arithmetic to be done
7762 if it's all in the wrong mode to form part of an address.
7763 And force_operand won't know whether to sign-extend or
7765 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7766 || mode != ptr_mode)
7768 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7769 subtarget, &op0, &op1, 0);
7770 if (op0 == const0_rtx)
7772 if (op1 == const0_rtx)
7777 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7778 subtarget, &op0, &op1, modifier);
7779 return simplify_gen_binary (PLUS, mode, op0, op1);
7782 /* For initializers, we are allowed to return a MINUS of two
7783 symbolic constants. Here we handle all cases when both operands
7785 /* Handle difference of two symbolic constants,
7786 for the sake of an initializer. */
7787 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7788 && really_constant_p (TREE_OPERAND (exp, 0))
7789 && really_constant_p (TREE_OPERAND (exp, 1)))
7791 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7792 NULL_RTX, &op0, &op1, modifier);
7794 /* If the last operand is a CONST_INT, use plus_constant of
7795 the negated constant. Else make the MINUS. */
7796 if (GET_CODE (op1) == CONST_INT)
7797 return plus_constant (op0, - INTVAL (op1));
7799 return gen_rtx_MINUS (mode, op0, op1);
7802 this_optab = ! unsignedp && flag_trapv
7803 && (GET_MODE_CLASS(mode) == MODE_INT)
7804 ? subv_optab : sub_optab;
7806 /* No sense saving up arithmetic to be done
7807 if it's all in the wrong mode to form part of an address.
7808 And force_operand won't know whether to sign-extend or
7810 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7811 || mode != ptr_mode)
7814 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7815 subtarget, &op0, &op1, modifier);
7817 /* Convert A - const to A + (-const). */
7818 if (GET_CODE (op1) == CONST_INT)
7820 op1 = negate_rtx (mode, op1);
7821 return simplify_gen_binary (PLUS, mode, op0, op1);
7827 /* If first operand is constant, swap them.
7828 Thus the following special case checks need only
7829 check the second operand. */
7830 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7832 tree t1 = TREE_OPERAND (exp, 0);
7833 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7834 TREE_OPERAND (exp, 1) = t1;
7837 /* Attempt to return something suitable for generating an
7838 indexed address, for machines that support that. */
7840 if (modifier == EXPAND_SUM && mode == ptr_mode
7841 && host_integerp (TREE_OPERAND (exp, 1), 0))
7843 tree exp1 = TREE_OPERAND (exp, 1);
7845 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7848 if (GET_CODE (op0) != REG)
7849 op0 = force_operand (op0, NULL_RTX);
7850 if (GET_CODE (op0) != REG)
7851 op0 = copy_to_mode_reg (mode, op0);
7853 return gen_rtx_MULT (mode, op0,
7854 gen_int_mode (tree_low_cst (exp1, 0),
7855 TYPE_MODE (TREE_TYPE (exp1))));
7858 if (modifier == EXPAND_STACK_PARM)
7861 /* Check for multiplying things that have been extended
7862 from a narrower type. If this machine supports multiplying
7863 in that narrower type with a result in the desired type,
7864 do it that way, and avoid the explicit type-conversion. */
7865 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7866 && TREE_CODE (type) == INTEGER_TYPE
7867 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7868 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7869 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7870 && int_fits_type_p (TREE_OPERAND (exp, 1),
7871 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7872 /* Don't use a widening multiply if a shift will do. */
7873 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7874 > HOST_BITS_PER_WIDE_INT)
7875 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7877 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7878 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7880 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7881 /* If both operands are extended, they must either both
7882 be zero-extended or both be sign-extended. */
7883 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7885 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7887 enum machine_mode innermode
7888 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7889 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7890 ? smul_widen_optab : umul_widen_optab);
7891 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7892 ? umul_widen_optab : smul_widen_optab);
7893 if (mode == GET_MODE_WIDER_MODE (innermode))
7895 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7897 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7898 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7899 TREE_OPERAND (exp, 1),
7900 NULL_RTX, &op0, &op1, 0);
7902 expand_operands (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7903 TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7904 NULL_RTX, &op0, &op1, 0);
7907 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7908 && innermode == word_mode)
7911 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7912 NULL_RTX, VOIDmode, 0);
7913 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7914 op1 = convert_modes (innermode, mode,
7915 expand_expr (TREE_OPERAND (exp, 1),
7916 NULL_RTX, VOIDmode, 0),
7919 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7920 NULL_RTX, VOIDmode, 0);
7921 temp = expand_binop (mode, other_optab, op0, op1, target,
7922 unsignedp, OPTAB_LIB_WIDEN);
7923 htem = expand_mult_highpart_adjust (innermode,
7924 gen_highpart (innermode, temp),
7926 gen_highpart (innermode, temp),
7928 emit_move_insn (gen_highpart (innermode, temp), htem);
7933 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7934 subtarget, &op0, &op1, 0);
7935 return expand_mult (mode, op0, op1, target, unsignedp);
7937 case TRUNC_DIV_EXPR:
7938 case FLOOR_DIV_EXPR:
7940 case ROUND_DIV_EXPR:
7941 case EXACT_DIV_EXPR:
7942 if (modifier == EXPAND_STACK_PARM)
7944 /* Possible optimization: compute the dividend with EXPAND_SUM;
7945 then, if the divisor is constant, we can optimize the case
7946 where some terms of the dividend have coeffs divisible by it. */
7947 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7948 subtarget, &op0, &op1, 0);
7949 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7951 case RDIV_EXPR:
7952 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal,
7953 saving an expensive divide. If not, combine will rebuild the
7954 original computation. */
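/* For instance, x/z + y/z can become (1/z)*x + (1/z)*y, sharing the
single reciprocal; rounding may differ, hence the
flag_unsafe_math_optimizations guard just below. */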
7955 if (flag_unsafe_math_optimizations && optimize && !optimize_size
7956 && TREE_CODE (type) == REAL_TYPE
7957 && !real_onep (TREE_OPERAND (exp, 0)))
7958 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
7959 build (RDIV_EXPR, type,
7960 build_real (type, dconst1),
7961 TREE_OPERAND (exp, 1))),
7962 target, tmode, modifier);
7963 this_optab = sdiv_optab;
7964 goto binop;
7966 case TRUNC_MOD_EXPR:
7967 case FLOOR_MOD_EXPR:
7968 case CEIL_MOD_EXPR:
7969 case ROUND_MOD_EXPR:
7970 if (modifier == EXPAND_STACK_PARM)
7971 target = 0;
7972 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
7973 subtarget, &op0, &op1, 0);
7974 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7976 case FIX_ROUND_EXPR:
7977 case FIX_FLOOR_EXPR:
7978 case FIX_CEIL_EXPR:
7979 abort (); /* Not used for C. */
7981 case FIX_TRUNC_EXPR:
7982 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7983 if (target == 0 || modifier == EXPAND_STACK_PARM)
7984 target = gen_reg_rtx (mode);
7985 expand_fix (target, op0, unsignedp);
7986 return target;
7988 case FLOAT_EXPR:
7989 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7990 if (target == 0 || modifier == EXPAND_STACK_PARM)
7991 target = gen_reg_rtx (mode);
7992 /* expand_float can't figure out what to do if FROM has VOIDmode.
7993 So give it the correct mode. With -O, cse will optimize this. */
7994 if (GET_MODE (op0) == VOIDmode)
7995 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7996 op0);
7997 expand_float (target, op0,
7998 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7999 return target;
8001 case NEGATE_EXPR:
8002 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8003 if (modifier == EXPAND_STACK_PARM)
8004 target = 0;
8005 temp = expand_unop (mode,
8006 ! unsignedp && flag_trapv
8007 && (GET_MODE_CLASS(mode) == MODE_INT)
8008 ? negv_optab : neg_optab, op0, target, 0);
8009 if (temp == 0)
8010 abort ();
8011 return temp;
8013 case ABS_EXPR:
8014 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8015 if (modifier == EXPAND_STACK_PARM)
8016 target = 0;
8018 /* ABS_EXPR is not valid for complex arguments. */
8019 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8020 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8021 abort ();
8023 /* Unsigned abs is simply the operand. Testing here means we don't
8024 risk generating incorrect code below. */
8025 if (TREE_UNSIGNED (type))
8026 return op0;
8028 return expand_abs (mode, op0, target, unsignedp,
8029 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
8031 case MAX_EXPR:
8032 case MIN_EXPR:
8033 target = original_target;
8034 if (target == 0
8035 || modifier == EXPAND_STACK_PARM
8036 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8037 || GET_MODE (target) != mode
8038 || (GET_CODE (target) == REG
8039 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8040 target = gen_reg_rtx (mode);
8041 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
8042 target, &op0, &op1, 0);
8044 /* First try to do it with a special MIN or MAX instruction.
8045 If that does not win, use a conditional jump to select the proper
8046 value. */
8047 this_optab = (TREE_UNSIGNED (type)
8048 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8049 : (code == MIN_EXPR ? smin_optab : smax_optab));
8051 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8052 OPTAB_WIDEN);
8053 if (temp != 0)
8054 return temp;
8056 /* At this point, a MEM target is no longer useful; we will get better
8057 code without it. */
8059 if (GET_CODE (target) == MEM)
8060 target = gen_reg_rtx (mode);
8062 /* If op1 was placed in target, swap op0 and op1. */
8063 if (target != op0 && target == op1)
8064 {
8065 rtx tem = op0;
8066 op0 = op1;
8067 op1 = tem;
8068 }
8070 if (target != op0)
8071 emit_move_insn (target, op0);
8073 op0 = gen_label_rtx ();
8075 /* If this mode is an integer too wide to compare properly,
8076 compare word by word. Rely on cse to optimize constant cases. */
8077 if (GET_MODE_CLASS (mode) == MODE_INT
8078 && ! can_compare_p (GE, mode, ccp_jump))
8080 if (code == MAX_EXPR)
8081 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8082 target, op1, NULL_RTX, op0);
8083 else
8084 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8085 op1, target, NULL_RTX, op0);
8086 }
8087 else
8088 {
8089 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8090 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8091 unsignedp, mode, NULL_RTX, NULL_RTX,
8092 op0);
8093 }
8094 emit_move_insn (target, op1);
8095 emit_label (op0);
8096 return target;
8098 case BIT_NOT_EXPR:
8099 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8100 if (modifier == EXPAND_STACK_PARM)
8101 target = 0;
8102 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8103 if (temp == 0)
8104 abort ();
8105 return temp;
8107 /* ??? Can optimize bitwise operations with one arg constant.
8108 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8109 and (a bitwise1 b) bitwise2 b (etc)
8110 but that is probably not worth while. */
8112 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8113 boolean values when we want in all cases to compute both of them. In
8114 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8115 as actual zero-or-1 values and then bitwise anding. In cases where
8116 there cannot be any side effects, better code would be made by
8117 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8118 how to recognize those cases. */
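/* E.g. (a < b) && (c < d) as TRUTH_AND_EXPR computes both comparisons
as 0-or-1 values and ANDs them, trading the branch that
TRUTH_ANDIF_EXPR would emit for straight-line code. */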
8120 case TRUTH_AND_EXPR:
8121 case BIT_AND_EXPR:
8122 this_optab = and_optab;
8123 goto binop;
8125 case TRUTH_OR_EXPR:
8126 case BIT_IOR_EXPR:
8127 this_optab = ior_optab;
8128 goto binop;
8130 case TRUTH_XOR_EXPR:
8131 case BIT_XOR_EXPR:
8132 this_optab = xor_optab;
8133 goto binop;
8135 case LSHIFT_EXPR:
8136 case RSHIFT_EXPR:
8137 case LROTATE_EXPR:
8138 case RROTATE_EXPR:
8139 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8140 subtarget = 0;
8141 if (modifier == EXPAND_STACK_PARM)
8142 target = 0;
8143 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8144 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8145 unsignedp);
8147 /* Could determine the answer when only additive constants differ. Also,
8148 the addition of one can be handled by changing the condition. */
8149 case LT_EXPR:
8150 case LE_EXPR:
8151 case GT_EXPR:
8152 case GE_EXPR:
8153 case EQ_EXPR:
8154 case NE_EXPR:
8155 case UNORDERED_EXPR:
8156 case ORDERED_EXPR:
8157 case UNLT_EXPR:
8158 case UNLE_EXPR:
8159 case UNGT_EXPR:
8160 case UNGE_EXPR:
8161 case UNEQ_EXPR:
8162 temp = do_store_flag (exp,
8163 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8164 tmode != VOIDmode ? tmode : mode, 0);
8165 if (temp != 0)
8166 return temp;
8168 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8169 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8170 && original_target
8171 && GET_CODE (original_target) == REG
8172 && (GET_MODE (original_target)
8173 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8175 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8176 VOIDmode, 0);
8178 /* If temp is constant, we can just compute the result. */
8179 if (GET_CODE (temp) == CONST_INT)
8180 {
8181 if (INTVAL (temp) != 0)
8182 emit_move_insn (target, const1_rtx);
8183 else
8184 emit_move_insn (target, const0_rtx);
8186 return target;
8187 }
8189 if (temp != original_target)
8191 enum machine_mode mode1 = GET_MODE (temp);
8192 if (mode1 == VOIDmode)
8193 mode1 = tmode != VOIDmode ? tmode : mode;
8195 temp = copy_to_mode_reg (mode1, temp);
8198 op1 = gen_label_rtx ();
8199 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8200 GET_MODE (temp), unsignedp, op1);
8201 emit_move_insn (temp, const1_rtx);
8202 emit_label (op1);
8203 return temp;
8204 }
8206 /* If no set-flag instruction, must generate a conditional
8207 store into a temporary variable. Drop through
8208 and handle this like && and ||. */
8210 case TRUTH_ANDIF_EXPR:
8211 case TRUTH_ORIF_EXPR:
8212 if (! ignore
8213 && (target == 0
8214 || modifier == EXPAND_STACK_PARM
8215 || ! safe_from_p (target, exp, 1)
8216 /* Make sure we don't have a hard reg (such as function's return
8217 value) live across basic blocks, if not optimizing. */
8218 || (!optimize && GET_CODE (target) == REG
8219 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8220 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8222 if (target)
8223 emit_clr_insn (target);
8225 op1 = gen_label_rtx ();
8226 jumpifnot (exp, op1);
8228 if (target)
8229 emit_0_to_1_insn (target);
8231 emit_label (op1);
8232 return ignore ? const0_rtx : target;
8234 case TRUTH_NOT_EXPR:
8235 if (modifier == EXPAND_STACK_PARM)
8236 target = 0;
8237 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8238 /* The parser is careful to generate TRUTH_NOT_EXPR
8239 only with operands that are always zero or one. */
8240 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8241 target, 1, OPTAB_LIB_WIDEN);
8242 if (temp == 0)
8243 abort ();
8244 return temp;
8246 case COMPOUND_EXPR:
8247 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8248 emit_queue ();
8249 return expand_expr_real (TREE_OPERAND (exp, 1),
8250 (ignore ? const0_rtx : target),
8251 VOIDmode, modifier, alt_rtl);
8253 case COND_EXPR:
8254 /* If we would have a "singleton" (see below) were it not for a
8255 conversion in each arm, bring that conversion back out. */
8256 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8257 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8258 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8259 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8261 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8262 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8264 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8265 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8266 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8267 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8268 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8269 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8270 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8271 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8272 return expand_expr (build1 (NOP_EXPR, type,
8273 build (COND_EXPR, TREE_TYPE (iftrue),
8274 TREE_OPERAND (exp, 0),
8275 iftrue, iffalse)),
8276 target, tmode, modifier);
8280 /* Note that COND_EXPRs whose type is a structure or union
8281 are required to be constructed to contain assignments of
8282 a temporary variable, so that we can evaluate them here
8283 for side effect only. If type is void, we must do likewise. */
8285 /* If an arm of the branch requires a cleanup,
8286 only that cleanup is performed. */
8287 {
8288 tree singleton = 0;
8289 tree binary_op = 0, unary_op = 0;
8291 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8292 convert it to our mode, if necessary. */
8293 if (integer_onep (TREE_OPERAND (exp, 1))
8294 && integer_zerop (TREE_OPERAND (exp, 2))
8295 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8296 {
8297 if (ignore)
8298 {
8299 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8300 modifier);
8301 return const0_rtx;
8302 }
8304 if (modifier == EXPAND_STACK_PARM)
8305 target = 0;
8306 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8307 if (GET_MODE (op0) == mode)
8308 return op0;
8310 if (target == 0)
8311 target = gen_reg_rtx (mode);
8312 convert_move (target, op0, unsignedp);
8313 return target;
8314 }
8316 /* Check for X ? A + B : A. If we have this, we can copy A to the
8317 output and conditionally add B. Similarly for unary operations.
8318 Don't do this if X has side-effects because those side effects
8319 might affect A or B and the "?" operation is a sequence point in
8320 ANSI. (operand_equal_p tests for side effects.) */
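/* E.g. x ? i + 1 : i qualifies: I is the "singleton" and the PLUS_EXPR
the binary_op; but f () ? i + 1 : i must not be handled this way,
since evaluating f () could change I. */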
8322 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8323 && operand_equal_p (TREE_OPERAND (exp, 2),
8324 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8325 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8326 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8327 && operand_equal_p (TREE_OPERAND (exp, 1),
8328 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8329 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8330 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8331 && operand_equal_p (TREE_OPERAND (exp, 2),
8332 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8333 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8334 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8335 && operand_equal_p (TREE_OPERAND (exp, 1),
8336 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8337 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8339 /* If we are not to produce a result, we have no target. Otherwise,
8340 if a target was specified use it; it will not be used as an
8341 intermediate target unless it is safe. If no target, use a
8342 temporary and mark it as safe. */
8344 if (ignore)
8345 temp = 0;
8346 else if (modifier == EXPAND_STACK_PARM)
8347 temp = assign_temp (type, 0, 0, 1);
8348 else if (original_target
8349 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8350 || (singleton && GET_CODE (original_target) == REG
8351 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8352 && original_target == var_rtx (singleton)))
8353 && GET_MODE (original_target) == mode
8354 #ifdef HAVE_conditional_move
8355 && (! can_conditionally_move_p (mode)
8356 || GET_CODE (original_target) == REG
8357 || TREE_ADDRESSABLE (type))
8358 #endif
8359 && (GET_CODE (original_target) != MEM
8360 || TREE_ADDRESSABLE (type)))
8361 temp = original_target;
8362 else if (TREE_ADDRESSABLE (type))
8363 abort ();
8364 else
8365 temp = assign_temp (type, 0, 0, 1);
8367 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8368 do the test of X as a store-flag operation, do this as
8369 A + ((X != 0) << log C). Similarly for other simple binary
8370 operators. Only do for C == 1 if BRANCH_COST is low. */
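/* E.g. X ? A + 4 : A becomes A + ((X != 0) << 2): the 0-or-1
store-flag result is shifted by log2 (4) and added, avoiding a
conditional branch altogether. */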
8371 if (temp && singleton && binary_op
8372 && (TREE_CODE (binary_op) == PLUS_EXPR
8373 || TREE_CODE (binary_op) == MINUS_EXPR
8374 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8375 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8376 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8377 : integer_onep (TREE_OPERAND (binary_op, 1)))
8378 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8379 {
8380 rtx result;
8381 tree cond;
8382 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8383 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8384 ? addv_optab : add_optab)
8385 : TREE_CODE (binary_op) == MINUS_EXPR
8386 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8387 ? subv_optab : sub_optab)
8388 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8389 : xor_optab);
8391 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8392 if (singleton == TREE_OPERAND (exp, 1))
8393 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8394 else
8395 cond = TREE_OPERAND (exp, 0);
8397 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8398 ? temp : NULL_RTX),
8399 mode, BRANCH_COST <= 1);
8401 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8402 result = expand_shift (LSHIFT_EXPR, mode, result,
8403 build_int_2 (tree_log2
8404 (TREE_OPERAND
8405 (binary_op, 1)),
8406 0),
8407 (safe_from_p (temp, singleton, 1)
8408 ? temp : NULL_RTX), 0);
8410 if (result)
8411 {
8412 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8413 return expand_binop (mode, boptab, op1, result, temp,
8414 unsignedp, OPTAB_LIB_WIDEN);
8415 }
8416 }
8418 do_pending_stack_adjust ();
8419 NO_DEFER_POP;
8420 op0 = gen_label_rtx ();
8422 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8423 {
8424 if (temp != 0)
8425 {
8426 /* If the target conflicts with the other operand of the
8427 binary op, we can't use it. Also, we can't use the target
8428 if it is a hard register, because evaluating the condition
8429 might clobber it. */
8430 if ((binary_op
8431 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8432 || (GET_CODE (temp) == REG
8433 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8434 temp = gen_reg_rtx (mode);
8435 store_expr (singleton, temp,
8436 modifier == EXPAND_STACK_PARM ? 2 : 0);
8437 }
8438 else
8439 expand_expr (singleton,
8440 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8441 if (singleton == TREE_OPERAND (exp, 1))
8442 jumpif (TREE_OPERAND (exp, 0), op0);
8443 else
8444 jumpifnot (TREE_OPERAND (exp, 0), op0);
8446 start_cleanup_deferral ();
8447 if (binary_op && temp == 0)
8448 /* Just touch the other operand. */
8449 expand_expr (TREE_OPERAND (binary_op, 1),
8450 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8451 else if (binary_op)
8452 store_expr (build (TREE_CODE (binary_op), type,
8453 make_tree (type, temp),
8454 TREE_OPERAND (binary_op, 1)),
8455 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8456 else
8457 store_expr (build1 (TREE_CODE (unary_op), type,
8458 make_tree (type, temp)),
8459 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
8460 op1 = op0;
8461 }
8462 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8463 comparison operator. If we have one of these cases, set the
8464 output to A, branch on A (cse will merge these two references),
8465 then set the output to FOO. */
8466 else if (temp
8467 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8468 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8469 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8470 TREE_OPERAND (exp, 1), 0)
8471 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8472 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8473 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8475 if (GET_CODE (temp) == REG
8476 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8477 temp = gen_reg_rtx (mode);
8478 store_expr (TREE_OPERAND (exp, 1), temp,
8479 modifier == EXPAND_STACK_PARM ? 2 : 0);
8480 jumpif (TREE_OPERAND (exp, 0), op0);
8482 start_cleanup_deferral ();
8483 if (TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8484 store_expr (TREE_OPERAND (exp, 2), temp,
8485 modifier == EXPAND_STACK_PARM ? 2 : 0);
8486 else
8487 expand_expr (TREE_OPERAND (exp, 2),
8488 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8489 op1 = op0;
8490 }
8491 else if (temp
8492 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8493 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8494 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8495 TREE_OPERAND (exp, 2), 0)
8496 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8497 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8498 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8500 if (GET_CODE (temp) == REG
8501 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8502 temp = gen_reg_rtx (mode);
8503 store_expr (TREE_OPERAND (exp, 2), temp,
8504 modifier == EXPAND_STACK_PARM ? 2 : 0);
8505 jumpifnot (TREE_OPERAND (exp, 0), op0);
8507 start_cleanup_deferral ();
8508 if (TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8509 store_expr (TREE_OPERAND (exp, 1), temp,
8510 modifier == EXPAND_STACK_PARM ? 2 : 0);
8511 else
8512 expand_expr (TREE_OPERAND (exp, 1),
8513 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8514 op1 = op0;
8515 }
8516 else
8517 {
8518 op1 = gen_label_rtx ();
8519 jumpifnot (TREE_OPERAND (exp, 0), op0);
8521 start_cleanup_deferral ();
8523 /* One branch of the cond can be void, if it never returns. For
8524 example A ? throw : E */
8525 if (temp != 0
8526 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8527 store_expr (TREE_OPERAND (exp, 1), temp,
8528 modifier == EXPAND_STACK_PARM ? 2 : 0);
8529 else
8530 expand_expr (TREE_OPERAND (exp, 1),
8531 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8532 end_cleanup_deferral ();
8533 emit_queue ();
8534 emit_jump_insn (gen_jump (op1));
8535 emit_barrier ();
8536 emit_label (op0);
8537 start_cleanup_deferral ();
8538 if (temp != 0
8539 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8540 store_expr (TREE_OPERAND (exp, 2), temp,
8541 modifier == EXPAND_STACK_PARM ? 2 : 0);
8542 else
8543 expand_expr (TREE_OPERAND (exp, 2),
8544 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8547 end_cleanup_deferral ();
8549 emit_queue ();
8550 emit_label (op1);
8551 OK_DEFER_POP;
8553 return temp;
8554 }
8556 case TARGET_EXPR:
8557 {
8558 /* Something needs to be initialized, but we didn't know
8559 where that thing was when building the tree. For example,
8560 it could be the return value of a function, or a parameter
8561 to a function which lays down in the stack, or a temporary
8562 variable which must be passed by reference.
8564 We guarantee that the expression will either be constructed
8565 or copied into our original target. */
8567 tree slot = TREE_OPERAND (exp, 0);
8568 tree cleanups = NULL_TREE;
8569 tree exp1;
8571 if (TREE_CODE (slot) != VAR_DECL)
8572 abort ();
8574 if (! ignore)
8575 target = original_target;
8577 /* Set this here so that if we get a target that refers to a
8578 register variable that's already been used, put_reg_into_stack
8579 knows that it should fix up those uses. */
8580 TREE_USED (slot) = 1;
8582 if (target == 0)
8583 {
8584 if (DECL_RTL_SET_P (slot))
8585 {
8586 target = DECL_RTL (slot);
8587 /* If we have already expanded the slot, don't do
8588 it again. (mrs) */
8589 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8590 return target;
8591 }
8592 else
8593 {
8594 target = assign_temp (type, 2, 0, 1);
8595 /* All temp slots at this level must not conflict. */
8596 preserve_temp_slots (target);
8597 SET_DECL_RTL (slot, target);
8598 if (TREE_ADDRESSABLE (slot))
8599 put_var_into_stack (slot, /*rescan=*/false);
8601 /* Since SLOT is not known to the called function
8602 to belong to its stack frame, we must build an explicit
8603 cleanup. This case occurs when we must build up a reference
8604 to pass the reference as an argument. In this case,
8605 it is very likely that such a reference need not be
8606 built here. */
8608 if (TREE_OPERAND (exp, 2) == 0)
8609 TREE_OPERAND (exp, 2)
8610 = (*lang_hooks.maybe_build_cleanup) (slot);
8611 cleanups = TREE_OPERAND (exp, 2);
8612 }
8613 }
8614 else
8615 {
8616 /* This case does occur, when expanding a parameter which
8617 needs to be constructed on the stack. The target
8618 is the actual stack address that we want to initialize.
8619 The function we call will perform the cleanup in this case. */
8621 /* If we have already assigned it space, use that space,
8622 not target that we were passed in, as our target
8623 parameter is only a hint. */
8624 if (DECL_RTL_SET_P (slot))
8626 target = DECL_RTL (slot);
8627 /* If we have already expanded the slot, don't do
8628 it again. (mrs) */
8629 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8630 return target;
8631 }
8632 else
8633 {
8634 SET_DECL_RTL (slot, target);
8635 /* If we must have an addressable slot, then make sure that
8636 the RTL that we just stored in slot is OK. */
8637 if (TREE_ADDRESSABLE (slot))
8638 put_var_into_stack (slot, /*rescan=*/true);
8642 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8643 /* Mark it as expanded. */
8644 TREE_OPERAND (exp, 1) = NULL_TREE;
8646 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
8648 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8650 return target;
8651 }
8653 case INIT_EXPR:
8654 {
8655 tree lhs = TREE_OPERAND (exp, 0);
8656 tree rhs = TREE_OPERAND (exp, 1);
8658 temp = expand_assignment (lhs, rhs, ! ignore);
8659 return temp;
8660 }
8662 case MODIFY_EXPR:
8663 {
8664 /* If lhs is complex, expand calls in rhs before computing it.
8665 That's so we don't compute a pointer and save it over a
8666 call. If lhs is simple, compute it first so we can give it
8667 as a target if the rhs is just a call. This avoids an
8668 extra temp and copy and that prevents a partial-subsumption
8669 which makes bad code. Actually we could treat
8670 component_ref's of vars like vars. */
8672 tree lhs = TREE_OPERAND (exp, 0);
8673 tree rhs = TREE_OPERAND (exp, 1);
8677 /* Check for |= or &= of a bitfield of size one into another bitfield
8678 of size 1. In this case, (unless we need the result of the
8679 assignment) we can do this more efficiently with a
8680 test followed by an assignment, if necessary.
8682 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8683 things change so we do, this code should be enhanced to
8684 handle it. */
8685 if (ignore
8686 && TREE_CODE (lhs) == COMPONENT_REF
8687 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8688 || TREE_CODE (rhs) == BIT_AND_EXPR)
8689 && TREE_OPERAND (rhs, 0) == lhs
8690 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8691 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8692 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8694 rtx label = gen_label_rtx ();
8696 do_jump (TREE_OPERAND (rhs, 1),
8697 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8698 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8699 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8700 (TREE_CODE (rhs) == BIT_IOR_EXPR
8701 ? integer_one_node
8702 : integer_zero_node)),
8703 0);
8704 do_pending_stack_adjust ();
8705 emit_label (label);
8706 return const0_rtx;
8707 }
8709 temp = expand_assignment (lhs, rhs, ! ignore);
8710 return temp;
8711 }
8713 case RETURN_EXPR:
8715 if (!TREE_OPERAND (exp, 0))
8716 expand_null_return ();
8717 else
8718 expand_return (TREE_OPERAND (exp, 0));
8719 return const0_rtx;
8721 case PREINCREMENT_EXPR:
8722 case PREDECREMENT_EXPR:
8723 return expand_increment (exp, 0, ignore);
8725 case POSTINCREMENT_EXPR:
8726 case POSTDECREMENT_EXPR:
8727 /* Faster to treat as pre-increment if result is not used. */
8728 return expand_increment (exp, ! ignore, ignore);
8730 case ADDR_EXPR:
8731 if (modifier == EXPAND_STACK_PARM)
8732 target = 0;
8733 /* Are we taking the address of a nested function? */
8734 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8735 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8736 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8737 && ! TREE_STATIC (exp))
8738 {
8739 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8740 op0 = force_operand (op0, target);
8741 }
8742 /* If we are taking the address of something erroneous, just
8743 return zero. */
8744 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8745 return const0_rtx;
8746 /* If we are taking the address of a constant and are at the
8747 top level, we have to use output_constant_def since we can't
8748 call force_const_mem at top level. */
8749 else if (cfun == 0
8750 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8751 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8752 == 'c')))
8753 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8754 else
8755 {
8756 /* We make sure to pass const0_rtx down if we came in with
8757 ignore set, to avoid doing the cleanups twice for something. */
8758 op0 = expand_expr (TREE_OPERAND (exp, 0),
8759 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8760 (modifier == EXPAND_INITIALIZER
8761 ? modifier : EXPAND_CONST_ADDRESS));
8763 /* If we are going to ignore the result, OP0 will have been set
8764 to const0_rtx, so just return it. Don't get confused and
8765 think we are taking the address of the constant. */
8766 if (ignore)
8767 return op0;
8769 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8770 clever and returns a REG when given a MEM. */
8771 op0 = protect_from_queue (op0, 1);
8773 /* We would like the object in memory. If it is a constant, we can
8774 have it be statically allocated into memory. For a non-constant,
8775 we need to allocate some memory and store the value into it. */
8777 if (CONSTANT_P (op0))
8778 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8779 op0);
8780 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8781 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8782 || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
8784 /* If the operand is a SAVE_EXPR, we can deal with this by
8785 forcing the SAVE_EXPR into memory. */
8786 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8788 put_var_into_stack (TREE_OPERAND (exp, 0),
8789 /*rescan=*/true);
8790 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
8791 }
8792 else
8793 {
8794 /* If this object is in a register, it can't be BLKmode. */
8795 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8796 rtx memloc = assign_temp (inner_type, 1, 1, 1);
8798 if (GET_CODE (op0) == PARALLEL)
8799 /* Handle calls that pass values in multiple
8800 non-contiguous locations. The Irix 6 ABI has examples
8801 of this. */
8802 emit_group_store (memloc, op0, inner_type,
8803 int_size_in_bytes (inner_type));
8804 else
8805 emit_move_insn (memloc, op0);
8807 op0 = memloc;
8808 }
8809 }
8811 if (GET_CODE (op0) != MEM)
8812 abort ();
8814 mark_temp_addr_taken (op0);
8815 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8817 op0 = XEXP (op0, 0);
8818 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8819 op0 = convert_memory_address (ptr_mode, op0);
8820 return op0;
8821 }
8823 /* If OP0 is not aligned as least as much as the type requires, we
8824 need to make a temporary, copy OP0 to it, and take the address of
8825 the temporary. We want to use the alignment of the type, not of
8826 the operand. Note that this is incorrect for FUNCTION_TYPE, but
8827 the test for BLKmode means that can't happen. The test for
8828 BLKmode is because we never make mis-aligned MEMs with
8829 non-BLKmode.
8831 We don't need to do this at all if the machine doesn't have
8832 strict alignment. */
8833 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
8834 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
8835 > MEM_ALIGN (op0))
8836 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
8838 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
8839 rtx new;
8841 if (TYPE_ALIGN_OK (inner_type))
8842 abort ();
8844 if (TREE_ADDRESSABLE (inner_type))
8846 /* We can't make a bitwise copy of this object, so fail. */
8847 error ("cannot take the address of an unaligned member");
8848 return const0_rtx;
8849 }
8851 new = assign_stack_temp_for_type
8852 (TYPE_MODE (inner_type),
8853 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
8854 : int_size_in_bytes (inner_type),
8855 1, build_qualified_type (inner_type,
8856 (TYPE_QUALS (inner_type)
8857 | TYPE_QUAL_CONST)));
8859 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8860 (modifier == EXPAND_STACK_PARM
8861 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
8863 op0 = new;
8864 }
8866 op0 = force_operand (XEXP (op0, 0), target);
8867 }
8869 if (flag_force_addr
8870 && GET_CODE (op0) != REG
8871 && modifier != EXPAND_CONST_ADDRESS
8872 && modifier != EXPAND_INITIALIZER
8873 && modifier != EXPAND_SUM)
8874 op0 = force_reg (Pmode, op0);
8876 if (GET_CODE (op0) == REG
8877 && ! REG_USERVAR_P (op0))
8878 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8880 if (GET_MODE (op0) == Pmode && mode == ptr_mode)
8881 op0 = convert_memory_address (ptr_mode, op0);
8883 return op0;
8885 case ENTRY_VALUE_EXPR:
8886 abort ();
8888 /* COMPLEX type for Extended Pascal & Fortran */
8889 case COMPLEX_EXPR:
8890 {
8891 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8892 rtx insns;
8894 /* Get the rtx code of the operands. */
8895 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8896 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8898 if (! target)
8899 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8901 start_sequence ();
8903 /* Move the real (op0) and imaginary (op1) parts to their location. */
8904 emit_move_insn (gen_realpart (mode, target), op0);
8905 emit_move_insn (gen_imagpart (mode, target), op1);
8907 insns = get_insns ();
8908 end_sequence ();
8910 /* Complex construction should appear as a single unit. */
8911 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8912 each with a separate pseudo as destination.
8913 It's not correct for flow to treat them as a unit. */
8914 if (GET_CODE (target) != CONCAT)
8915 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8916 else
8917 emit_insn (insns);
8919 return target;
8920 }
8922 case REALPART_EXPR:
8923 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8924 return gen_realpart (mode, op0);
8926 case IMAGPART_EXPR:
8927 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8928 return gen_imagpart (mode, op0);
8930 case CONJ_EXPR:
8931 {
8932 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
8933 rtx imag_t;
8934 rtx insns;
8936 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8938 if (! target)
8939 target = gen_reg_rtx (mode);
8941 start_sequence ();
8943 /* Store the realpart and the negated imagpart to target. */
8944 emit_move_insn (gen_realpart (partmode, target),
8945 gen_realpart (partmode, op0));
8947 imag_t = gen_imagpart (partmode, target);
8948 temp = expand_unop (partmode,
8949 ! unsignedp && flag_trapv
8950 && (GET_MODE_CLASS(partmode) == MODE_INT)
8951 ? negv_optab : neg_optab,
8952 gen_imagpart (partmode, op0), imag_t, 0);
8953 if (temp != imag_t)
8954 emit_move_insn (imag_t, temp);
8956 insns = get_insns ();
8957 end_sequence ();
8959 /* Conjugate should appear as a single unit
8960 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8961 each with a separate pseudo as destination.
8962 It's not correct for flow to treat them as a unit. */
8963 if (GET_CODE (target) != CONCAT)
8964 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8965 else
8966 emit_insn (insns);
8968 return target;
8969 }
8971 case TRY_CATCH_EXPR:
8973 tree handler = TREE_OPERAND (exp, 1);
8975 expand_eh_region_start ();
8977 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8979 expand_eh_region_end_cleanup (handler);
8981 return op0;
8982 }
8984 case TRY_FINALLY_EXPR:
8986 tree try_block = TREE_OPERAND (exp, 0);
8987 tree finally_block = TREE_OPERAND (exp, 1);
8989 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8991 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
8992 is not sufficient, so we cannot expand the block twice.
8993 So we play games with GOTO_SUBROUTINE_EXPR to let us
8994 expand the thing only once. */
8995 /* When not optimizing, we go ahead with this form since
8996 (1) user breakpoints operate more predictably without
8997 code duplication, and
8998 (2) we're not running any of the global optimizers
8999 that would explode in time/space with the highly
9000 connected CFG created by the indirect branching. */
9002 rtx finally_label = gen_label_rtx ();
9003 rtx done_label = gen_label_rtx ();
9004 rtx return_link = gen_reg_rtx (Pmode);
9005 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9006 (tree) finally_label, (tree) return_link);
9007 TREE_SIDE_EFFECTS (cleanup) = 1;
9009 /* Start a new binding layer that will keep track of all cleanup
9010 actions to be performed. */
9011 expand_start_bindings (2);
9012 target_temp_slot_level = temp_slot_level;
9014 expand_decl_cleanup (NULL_TREE, cleanup);
9015 op0 = expand_expr (try_block, target, tmode, modifier);
9017 preserve_temp_slots (op0);
9018 expand_end_bindings (NULL_TREE, 0, 0);
9019 emit_jump (done_label);
9020 emit_label (finally_label);
9021 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9022 emit_indirect_jump (return_link);
9023 emit_label (done_label);
9024 }
9025 else
9026 {
9027 expand_start_bindings (2);
9028 target_temp_slot_level = temp_slot_level;
9030 expand_decl_cleanup (NULL_TREE, finally_block);
9031 op0 = expand_expr (try_block, target, tmode, modifier);
9033 preserve_temp_slots (op0);
9034 expand_end_bindings (NULL_TREE, 0, 0);
9035 }
9037 return op0;
9038 }
9040 case GOTO_SUBROUTINE_EXPR:
9042 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9043 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9044 rtx return_address = gen_label_rtx ();
9045 emit_move_insn (return_link,
9046 gen_rtx_LABEL_REF (Pmode, return_address));
9047 emit_jump (subr);
9048 emit_label (return_address);
9049 return const0_rtx;
9050 }
9052 case VA_ARG_EXPR:
9053 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9055 case EXC_PTR_EXPR:
9056 return get_exception_pointer (cfun);
9058 case FDESC_EXPR:
9059 /* Function descriptors are not valid except as
9060 initialization constants, and should not be expanded. */
9061 abort ();
9063 default:
9064 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier,
9065 alt_rtl);
9066 }
9068 /* Here to do an ordinary binary operator, generating an instruction
9069 from the optab already placed in `this_optab'. */
9070 binop:
9071 expand_operands (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1),
9072 subtarget, &op0, &op1, 0);
9073 binop2:
9074 if (modifier == EXPAND_STACK_PARM)
9075 target = 0;
9076 temp = expand_binop (mode, this_optab, op0, op1, target,
9077 unsignedp, OPTAB_LIB_WIDEN);
9078 if (temp == 0)
9079 abort ();
9080 return temp;
9081 }
9083 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9084 when applied to the address of EXP produces an address known to be
9085 aligned more than BIGGEST_ALIGNMENT. */
9087 static int
9088 is_aligning_offset (tree offset, tree exp)
9089 {
9090 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9091 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9092 || TREE_CODE (offset) == NOP_EXPR
9093 || TREE_CODE (offset) == CONVERT_EXPR
9094 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9095 offset = TREE_OPERAND (offset, 0);
9097 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9098 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9099 if (TREE_CODE (offset) != BIT_AND_EXPR
9100 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9101 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9102 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9103 return 0;
9105 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9106 It must be NEGATE_EXPR. Then strip any more conversions. */
9107 offset = TREE_OPERAND (offset, 0);
9108 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9109 || TREE_CODE (offset) == NOP_EXPR
9110 || TREE_CODE (offset) == CONVERT_EXPR)
9111 offset = TREE_OPERAND (offset, 0);
9113 if (TREE_CODE (offset) != NEGATE_EXPR)
9114 return 0;
9116 offset = TREE_OPERAND (offset, 0);
9117 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9118 || TREE_CODE (offset) == NOP_EXPR
9119 || TREE_CODE (offset) == CONVERT_EXPR)
9120 offset = TREE_OPERAND (offset, 0);
9122 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9123 whose type is the same as EXP. */
9124 return (TREE_CODE (offset) == ADDR_EXPR
9125 && (TREE_OPERAND (offset, 0) == exp
9126 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9127 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9128 == TREE_TYPE (exp)))));
9129 }
9131 /* Return the tree node if an ARG corresponds to a string constant or zero
9132 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
9133 in bytes within the string that ARG is accessing. The type of the
9134 offset will be `sizetype'. */
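/* E.g. for the argument tree "hello" + 2 this returns the STRING_CST
"hello" with *PTR_OFFSET set to 2; callers (such as the strlen
builtin expander) can then fold the access at compile time. */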
9136 tree
9137 string_constant (tree arg, tree *ptr_offset)
9138 {
9139 STRIP_NOPS (arg);
9141 if (TREE_CODE (arg) == ADDR_EXPR
9142 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9144 *ptr_offset = size_zero_node;
9145 return TREE_OPERAND (arg, 0);
9147 else if (TREE_CODE (arg) == PLUS_EXPR)
9149 tree arg0 = TREE_OPERAND (arg, 0);
9150 tree arg1 = TREE_OPERAND (arg, 1);
9152 STRIP_NOPS (arg0);
9153 STRIP_NOPS (arg1);
9155 if (TREE_CODE (arg0) == ADDR_EXPR
9156 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9158 *ptr_offset = convert (sizetype, arg1);
9159 return TREE_OPERAND (arg0, 0);
9161 else if (TREE_CODE (arg1) == ADDR_EXPR
9162 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9164 *ptr_offset = convert (sizetype, arg0);
9165 return TREE_OPERAND (arg1, 0);
9166 }
9168 return 0;
9169 }
9172 /* Expand code for a post- or pre- increment or decrement
9173 and return the RTX for the result.
9174 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
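/* E.g. for "b = a++" POST is 1: the old value of A is the result and
the increment is queued; for "b = ++a" POST is 0 and the incremented
value itself is returned. */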
9176 static rtx
9177 expand_increment (tree exp, int post, int ignore)
9178 {
9179 rtx op0, op1;
9180 rtx temp, value;
9181 tree incremented = TREE_OPERAND (exp, 0);
9182 optab this_optab = add_optab;
9183 int icode;
9184 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9185 int op0_is_copy = 0;
9186 int single_insn = 0;
9187 /* 1 means we can't store into OP0 directly,
9188 because it is a subreg narrower than a word,
9189 and we don't dare clobber the rest of the word. */
9190 int bad_subreg = 0;
9192 /* Stabilize any component ref that might need to be
9193 evaluated more than once below. */
9194 if (!post
9195 || TREE_CODE (incremented) == BIT_FIELD_REF
9196 || (TREE_CODE (incremented) == COMPONENT_REF
9197 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9198 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9199 incremented = stabilize_reference (incremented);
9200 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9201 ones into save exprs so that they don't accidentally get evaluated
9202 more than once by the code below. */
9203 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9204 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9205 incremented = save_expr (incremented);
9207 /* Compute the operands as RTX.
9208 Note whether OP0 is the actual lvalue or a copy of it:
9209 I believe it is a copy iff it is a register or subreg
9210 and insns were generated in computing it. */
9212 temp = get_last_insn ();
9213 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9215 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9216 in place but instead must do sign- or zero-extension during assignment,
9217 so we copy it into a new register and let the code below use it as
9220 Note that we can safely modify this SUBREG since it is know not to be
9221 shared (it was made by the expand_expr call above). */
9223 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9224 {
9225 if (post)
9226 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9227 else
9228 op0 = copy_to_reg (op0);
9229 }
9230 else if (GET_CODE (op0) == SUBREG
9231 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9232 {
9233 /* We cannot increment this SUBREG in place. If we are
9234 post-incrementing, get a copy of the old value. Otherwise,
9235 just mark that we cannot increment in place. */
9236 if (post)
9237 op0 = copy_to_reg (op0);
9238 else
9239 bad_subreg = 1;
9240 }
9242 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9243 && temp != get_last_insn ());
9244 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9246 /* Decide whether incrementing or decrementing. */
9247 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9248 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9249 this_optab = sub_optab;
9251 /* Convert decrement by a constant into a negative increment. */
9252 if (this_optab == sub_optab
9253 && GET_CODE (op1) == CONST_INT)
9254 {
9255 op1 = GEN_INT (-INTVAL (op1));
9256 this_optab = add_optab;
9257 }
9259 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9260 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9262 /* For a preincrement, see if we can do this with a single instruction. */
9263 if (!post)
9264 {
9265 icode = (int) this_optab->handlers[(int) mode].insn_code;
9266 if (icode != (int) CODE_FOR_nothing
9267 /* Make sure that OP0 is valid for operands 0 and 1
9268 of the insn we want to queue. */
9269 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9270 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9271 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9272 single_insn = 1;
9273 }
9275 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9276 then we cannot just increment OP0. We must therefore contrive to
9277 increment the original value. Then, for postincrement, we can return
9278 OP0 since it is a copy of the old value. For preincrement, expand here
9279 unless we can do it with a single insn.
9281 Likewise if storing directly into OP0 would clobber high bits
9282 we need to preserve (bad_subreg). */
9283 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9285 /* This is the easiest way to increment the value wherever it is.
9286 Problems with multiple evaluation of INCREMENTED are prevented
9287 because either (1) it is a component_ref or preincrement,
9288 in which case it was stabilized above, or (2) it is an array_ref
9289 with constant index in an array in a register, which is
9290 safe to reevaluate. */
9291 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9292 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9293 ? MINUS_EXPR : PLUS_EXPR),
9294 TREE_TYPE (exp),
9295 incremented,
9296 TREE_OPERAND (exp, 1));
9298 while (TREE_CODE (incremented) == NOP_EXPR
9299 || TREE_CODE (incremented) == CONVERT_EXPR)
9300 {
9301 newexp = convert (TREE_TYPE (incremented), newexp);
9302 incremented = TREE_OPERAND (incremented, 0);
9303 }
9305 temp = expand_assignment (incremented, newexp, ! post && ! ignore);
9306 return post ? op0 : temp;
9307 }
9309 if (post)
9310 {
9311 /* We have a true reference to the value in OP0.
9312 If there is an insn to add or subtract in this mode, queue it.
9313 Queuing the increment insn avoids the register shuffling
9314 that often results if we must increment now and first save
9315 the old value for subsequent use. */
9317 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9318 op0 = stabilize (op0);
9319 #endif
9321 icode = (int) this_optab->handlers[(int) mode].insn_code;
9322 if (icode != (int) CODE_FOR_nothing
9323 /* Make sure that OP0 is valid for operands 0 and 1
9324 of the insn we want to queue. */
9325 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9326 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9328 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9329 op1 = force_reg (mode, op1);
9331 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9332 }
9333 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9334 {
9335 rtx addr = (general_operand (XEXP (op0, 0), mode)
9336 ? force_reg (Pmode, XEXP (op0, 0))
9337 : copy_to_reg (XEXP (op0, 0)));
9338 rtx temp, result;
9340 op0 = replace_equiv_address (op0, addr);
9341 temp = force_reg (GET_MODE (op0), op0);
9342 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9343 op1 = force_reg (mode, op1);
9345 /* The increment queue is LIFO, thus we have to `queue'
9346 the instructions in reverse order. */
9347 enqueue_insn (op0, gen_move_insn (op0, temp));
9348 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9349 return result;
9350 }
9351 }
9353 /* Preincrement, or we can't increment with one simple insn. */
9354 if (post)
9355 /* Save a copy of the value before inc or dec, to return it later. */
9356 temp = value = copy_to_reg (op0);
9357 else
9358 /* Arrange to return the incremented value. */
9359 /* Copy the rtx because expand_binop will protect from the queue,
9360 and the results of that would be invalid for us to return
9361 if our caller does emit_queue before using our result. */
9362 temp = copy_rtx (value = op0);
9364 /* Increment however we can. */
9365 op1 = expand_binop (mode, this_optab, value, op1, op0,
9366 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9368 /* Make sure the value is stored into OP0. */
9369 if (op1 != op0)
9370 emit_move_insn (op0, op1);
9372 return temp;
9373 }
9375 /* Generate code to calculate EXP using a store-flag instruction
9376 and return an rtx for the result. EXP is either a comparison
9377 or a TRUTH_NOT_EXPR whose operand is a comparison.
9379 If TARGET is nonzero, store the result there if convenient.
9381 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
9382 cheap.
9384 Return zero if there is no suitable set-flag instruction
9385 available on this machine.
9387 Once expand_expr has been called on the arguments of the comparison,
9388 we are committed to doing the store flag, since it is not safe to
9389 re-evaluate the expression. We emit the store-flag insn by calling
9390 emit_store_flag, but only expand the arguments if we have a reason
9391 to believe that emit_store_flag will be successful. If we think that
9392 it will, but it isn't, we have to simulate the store-flag with a
9393 set/jump/set sequence. */
9395 static rtx
9396 do_store_flag (tree exp, rtx target, enum machine_mode mode, int only_cheap)
9397 {
9398 enum rtx_code code;
9399 tree arg0, arg1, type;
9400 tree tem;
9401 enum machine_mode operand_mode;
9402 int invert = 0;
9403 int unsignedp;
9404 rtx op0, op1;
9405 enum insn_code icode;
9406 rtx subtarget = target;
9407 rtx result, label;
9409 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9410 result at the end. We can't simply invert the test since it would
9411 have already been inverted if it were valid. This case occurs for
9412 some floating-point comparisons. */
9414 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9415 invert = 1, exp = TREE_OPERAND (exp, 0);
9417 arg0 = TREE_OPERAND (exp, 0);
9418 arg1 = TREE_OPERAND (exp, 1);
9420 /* Don't crash if the comparison was erroneous. */
9421 if (arg0 == error_mark_node || arg1 == error_mark_node)
9422 return const0_rtx;
9424 type = TREE_TYPE (arg0);
9425 operand_mode = TYPE_MODE (type);
9426 unsignedp = TREE_UNSIGNED (type);
9428 /* We won't bother with BLKmode store-flag operations because it would mean
9429 passing a lot of information to emit_store_flag. */
9430 if (operand_mode == BLKmode)
9431 return 0;
9433 /* We won't bother with store-flag operations involving function pointers
9434 when function pointers must be canonicalized before comparisons. */
9435 #ifdef HAVE_canonicalize_funcptr_for_compare
9436 if (HAVE_canonicalize_funcptr_for_compare
9437 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9438 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9439 == FUNCTION_TYPE)
9440 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9441 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9442 == FUNCTION_TYPE))))
9443 return 0;
9444 #endif
9449 /* Get the rtx comparison code to use. We know that EXP is a comparison
9450 operation of some type. Some comparisons against 1 and -1 can be
9451 converted to comparisons with zero. Do so here so that the tests
9452 below will be aware that we have a comparison with zero. These
9453 tests will not catch constants in the first operand, but constants
9454 are rarely passed as the first operand. */
9456 switch (TREE_CODE (exp))
9457 {
9458 case EQ_EXPR:
9459 code = EQ;
9460 break;
9461 case NE_EXPR:
9462 code = NE;
9463 break;
9464 case LT_EXPR:
9465 if (integer_onep (arg1))
9466 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9467 else
9468 code = unsignedp ? LTU : LT;
9469 break;
9470 case LE_EXPR:
9471 if (! unsignedp && integer_all_onesp (arg1))
9472 arg1 = integer_zero_node, code = LT;
9473 else
9474 code = unsignedp ? LEU : LE;
9475 break;
9476 case GT_EXPR:
9477 if (! unsignedp && integer_all_onesp (arg1))
9478 arg1 = integer_zero_node, code = GE;
9479 else
9480 code = unsignedp ? GTU : GT;
9481 break;
9482 case GE_EXPR:
9483 if (integer_onep (arg1))
9484 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9485 else
9486 code = unsignedp ? GEU : GE;
9487 break;
9489 case UNORDERED_EXPR:
9490 code = UNORDERED;
9491 break;
9492 case ORDERED_EXPR:
9493 code = ORDERED;
9494 break;
9495 case UNLT_EXPR:
9496 code = UNLT;
9497 break;
9498 case UNLE_EXPR:
9499 code = UNLE;
9500 break;
9501 case UNGT_EXPR:
9502 code = UNGT;
9503 break;
9504 case UNGE_EXPR:
9505 code = UNGE;
9506 break;
9507 case UNEQ_EXPR:
9508 code = UNEQ;
9509 break;
9510 default:
9511 abort ();
9512 }
9515 /* Put a constant second. */
9516 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9517 {
9518 tem = arg0; arg0 = arg1; arg1 = tem;
9519 code = swap_condition (code);
9520 }
9522 /* If this is an equality or inequality test of a single bit, we can
9523 do this by shifting the bit being tested to the low-order bit and
9524 masking the result with the constant 1. If the condition was EQ,
9525 we xor it with 1. This does not require an scc insn and is faster
9526 than an scc insn even if we have it.
9528 The code to make this transformation was moved into fold_single_bit_test,
9529 so we just call into the folder and expand its result. */
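/* E.g. (x & 8) != 0 is folded to (x >> 3) & 1, and the EQ form gets a
final XOR with 1, so only shift/and/xor instructions are needed. */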
9531 if ((code == NE || code == EQ)
9532 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9533 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9535 tree type = (*lang_hooks.types.type_for_mode) (mode, unsignedp);
9536 return expand_expr (fold_single_bit_test (code == NE ? NE_EXPR : EQ_EXPR,
9537 arg0, arg1, type),
9538 target, VOIDmode, EXPAND_NORMAL);
9539 }
9541 /* Now see if we are likely to be able to do this. Return if not. */
9542 if (! can_compare_p (code, operand_mode, ccp_store_flag))
9543 return 0;
9545 icode = setcc_gen_code[(int) code];
9546 if (icode == CODE_FOR_nothing
9547 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
9548 {
9549 /* We can only do this if it is one of the special cases that
9550 can be handled without an scc insn. */
9551 if ((code == LT && integer_zerop (arg1))
9552 || (! only_cheap && code == GE && integer_zerop (arg1)))
9553 ;
9554 else if (BRANCH_COST >= 0
9555 && ! only_cheap && (code == NE || code == EQ)
9556 && TREE_CODE (type) != REAL_TYPE
9557 && ((abs_optab->handlers[(int) operand_mode].insn_code
9558 != CODE_FOR_nothing)
9559 || (ffs_optab->handlers[(int) operand_mode].insn_code
9560 != CODE_FOR_nothing)))
9561 ;
9562 else
9563 return 0;
9564 }
9566 if (! get_subtarget (target)
9567 || GET_MODE (subtarget) != operand_mode)
9568 subtarget = 0;
9570 expand_operands (arg0, arg1, subtarget, &op0, &op1, 0);
9572 if (target == 0)
9573 target = gen_reg_rtx (mode);
9575 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9576 because, if the emit_store_flag does anything it will succeed and
9577 OP0 and OP1 will not be used subsequently. */
9579 result = emit_store_flag (target, code,
9580 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9581 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9582 operand_mode, unsignedp, 1);
9584 if (result)
9585 {
9586 if (invert)
9587 result = expand_binop (mode, xor_optab, result, const1_rtx,
9588 result, 0, OPTAB_LIB_WIDEN);
9589 return result;
9590 }
9592 /* If this failed, we have to do this with set/compare/jump/set code. */
9593 if (GET_CODE (target) != REG
9594 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9595 target = gen_reg_rtx (GET_MODE (target));
9597 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9598 result = compare_from_rtx (op0, op1, code, unsignedp,
9599 operand_mode, NULL_RTX);
9600 if (GET_CODE (result) == CONST_INT)
9601 return (((result == const0_rtx && ! invert)
9602 || (result != const0_rtx && invert))
9603 ? const0_rtx : const1_rtx);
9605 /* The code of RESULT may not match CODE if compare_from_rtx
9606 decided to swap its operands and reverse the original code.
9608 We know that compare_from_rtx returns either a CONST_INT or
9609 a new comparison code, so it is safe to just extract the
9610 code from RESULT. */
9611 code = GET_CODE (result);
9613 label = gen_label_rtx ();
9614 if (bcc_gen_fctn[(int) code] == 0)
9615 abort ();
9617 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9618 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9619 emit_label (label);
9621 return target;
9622 }
9625 /* Stubs in case we haven't got a casesi insn. */
9626 #ifndef HAVE_casesi
9627 # define HAVE_casesi 0
9628 # define gen_casesi(a, b, c, d, e) (0)
9629 # define CODE_FOR_casesi CODE_FOR_nothing
9630 #endif
9632 /* If the machine does not have a case insn that compares the bounds,
9633 this means extra overhead for dispatch tables, which raises the
9634 threshold for using them. */
9635 #ifndef CASE_VALUES_THRESHOLD
9636 #define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
9637 #endif /* CASE_VALUES_THRESHOLD */
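/* E.g. with a casesi insn a switch needs only 4 case values to be worth
a dispatch table; without one the cutoff rises to 5, since the bounds
comparison must then be emitted separately. */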
9639 unsigned int
9640 case_values_threshold (void)
9641 {
9642 return CASE_VALUES_THRESHOLD;
9643 }
9645 /* Attempt to generate a casesi instruction. Returns 1 if successful,
9646 0 otherwise (i.e. if there is no casesi instruction). */
9647 int
9648 try_casesi (tree index_type, tree index_expr, tree minval, tree range,
9649 rtx table_label ATTRIBUTE_UNUSED, rtx default_label)
9650 {
9651 enum machine_mode index_mode = SImode;
9652 int index_bits = GET_MODE_BITSIZE (index_mode);
9653 rtx op1, op2, index;
9654 enum machine_mode op_mode;
9656 if (! HAVE_casesi)
9657 return 0;
9659 /* Convert the index to SImode. */
9660 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
9661 {
9662 enum machine_mode omode = TYPE_MODE (index_type);
9663 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
9665 /* We must handle the endpoints in the original mode. */
9666 index_expr = build (MINUS_EXPR, index_type,
9667 index_expr, minval);
9668 minval = integer_zero_node;
9669 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9670 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
9671 omode, 1, default_label);
9672 /* Now we can safely truncate. */
9673 index = convert_to_mode (index_mode, index, 0);
9674 }
9675 else
9676 {
9677 if (TYPE_MODE (index_type) != index_mode)
9679 index_expr = convert ((*lang_hooks.types.type_for_size)
9680 (index_bits, 0), index_expr);
9681 index_type = TREE_TYPE (index_expr);
9684 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9685 }
9687 index = protect_from_queue (index, 0);
9688 do_pending_stack_adjust ();
9690 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
9691 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
9692 (index, op_mode))
9693 index = copy_to_mode_reg (op_mode, index);
9695 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
9697 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
9698 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
9699 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
9700 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
9701 (op1, op_mode))
9702 op1 = copy_to_mode_reg (op_mode, op1);
9704 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
9706 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
9707 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
9708 op2, TREE_UNSIGNED (TREE_TYPE (range)));
9709 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
9710 (op2, op_mode))
9711 op2 = copy_to_mode_reg (op_mode, op2);
9713 emit_jump_insn (gen_casesi (index, op1, op2,
9714 table_label, default_label));
9715 return 1;
9716 }
9718 /* Attempt to generate a tablejump instruction; same concept. */
9719 #ifndef HAVE_tablejump
9720 #define HAVE_tablejump 0
9721 #define gen_tablejump(x, y) (0)
9722 #endif
9724 /* Subroutine of the next function.
9726 INDEX is the value being switched on, with the lowest value
9727 in the table already subtracted.
9728 MODE is its expected mode (needed if INDEX is constant).
9729 RANGE is the length of the jump table.
9730 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
9732 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9733 index value is out of range. */
9735 static void
9736 do_tablejump (rtx index, enum machine_mode mode, rtx range, rtx table_label,
9737 rtx default_label)
9738 {
9739 rtx temp, vector;
9741 if (INTVAL (range) > cfun->max_jumptable_ents)
9742 cfun->max_jumptable_ents = INTVAL (range);
9744 /* Do an unsigned comparison (in the proper mode) between the index
9745 expression and the value which represents the length of the range.
9746 Since we just finished subtracting the lower bound of the range
9747 from the index expression, this comparison allows us to simultaneously
9748 check that the original index expression value is both greater than
9749 or equal to the minimum value of the range and less than or equal to
9750 the maximum value of the range. */
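/* E.g. for a switch over 10..20, INDEX arrives here as i - 10 and RANGE
is 10; the single unsigned test (unsigned) (i - 10) > 10 catches both
i < 10 (which wrapped around to a huge value) and i > 20. */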
9752 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
9753 default_label);
9755 /* If index is in range, it must fit in Pmode.
9756 Convert to Pmode so we can index with it. */
9757 if (mode != Pmode)
9758 index = convert_to_mode (Pmode, index, 1);
9760 /* Don't let a MEM slip through, because then INDEX that comes
9761 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9762 and break_out_memory_refs will go to work on it and mess it up. */
9763 #ifdef PIC_CASE_VECTOR_ADDRESS
9764 if (flag_pic && GET_CODE (index) != REG)
9765 index = copy_to_mode_reg (Pmode, index);
9766 #endif
9768 /* If flag_force_addr were to affect this address
9769 it could interfere with the tricky assumptions made
9770 about addresses that contain label-refs,
9771 which may be valid only very near the tablejump itself. */
9772 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9773 GET_MODE_SIZE, because this indicates how large insns are. The other
9774 uses should all be Pmode, because they are addresses. This code
9775 could fail if addresses and insns are not the same size. */
9776 index = gen_rtx_PLUS (Pmode,
9777 gen_rtx_MULT (Pmode, index,
9778 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9779 gen_rtx_LABEL_REF (Pmode, table_label));
9780 #ifdef PIC_CASE_VECTOR_ADDRESS
9781 if (flag_pic)
9782 index = PIC_CASE_VECTOR_ADDRESS (index);
9783 #endif
9785 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9786 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9787 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
9788 RTX_UNCHANGING_P (vector) = 1;
9789 MEM_NOTRAP_P (vector) = 1;
9790 convert_move (temp, vector, 0);
9792 emit_jump_insn (gen_tablejump (temp, table_label));
9794 /* If we are generating PIC code or if the table is PC-relative, the
9795 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9796 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
9797 emit_barrier ();
9798 }
9800 int
9801 try_tablejump (tree index_type, tree index_expr, tree minval, tree range,
9802 rtx table_label, rtx default_label)
9803 {
9804 rtx index;
9806 if (! HAVE_tablejump)
9807 return 0;
9809 index_expr = fold (build (MINUS_EXPR, index_type,
9810 convert (index_type, index_expr),
9811 convert (index_type, minval)));
9812 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
9814 index = protect_from_queue (index, 0);
9815 do_pending_stack_adjust ();
9817 do_tablejump (index, TYPE_MODE (index_type),
9818 convert_modes (TYPE_MODE (index_type),
9819 TYPE_MODE (TREE_TYPE (range)),
9820 expand_expr (range, NULL_RTX,
9821 VOIDmode, 0),
9822 TREE_UNSIGNED (TREE_TYPE (range))),
9823 table_label, default_label);
9824 return 1;
9825 }
9827 /* Nonzero if the mode is a valid vector mode for this architecture.
9828 This returns nonzero even if there is no hardware support for the
9829 vector mode, but we can emulate with narrower modes. */
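/* E.g. V2DImode can be reported valid even on a target without vector
units, as long as DImode moves exist, since each half can be moved
separately (see the mov_optab check below). */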
9831 int
9832 vector_mode_valid_p (enum machine_mode mode)
9833 {
9834 enum mode_class class = GET_MODE_CLASS (mode);
9835 enum machine_mode innermode;
9837 /* Doh! What's going on? */
9838 if (class != MODE_VECTOR_INT
9839 && class != MODE_VECTOR_FLOAT)
9840 return 0;
9842 /* Hardware support. Woo hoo! */
9843 if (VECTOR_MODE_SUPPORTED_P (mode))
9844 return 1;
9846 innermode = GET_MODE_INNER (mode);
9848 /* We should probably return 1 if requesting V4DI and we have no DI,
9849 but we have V2DI, but this is probably very unlikely. */
9851 /* If we have support for the inner mode, we can safely emulate it.
9852 We may not have V2DI, but we can emulate with a pair of DIs. */
9853 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
9854 }
9856 /* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
9857 static rtx
9858 const_vector_from_tree (tree exp)
9859 {
9860 rtvec v;
9861 int units, i;
9862 tree link, elt;
9863 enum machine_mode inner, mode;
9865 mode = TYPE_MODE (TREE_TYPE (exp));
9867 if (is_zeros_p (exp))
9868 return CONST0_RTX (mode);
9870 units = GET_MODE_NUNITS (mode);
9871 inner = GET_MODE_INNER (mode);
9873 v = rtvec_alloc (units);
9875 link = TREE_VECTOR_CST_ELTS (exp);
9876 for (i = 0; link; link = TREE_CHAIN (link), ++i)
9877 {
9878 elt = TREE_VALUE (link);
9880 if (TREE_CODE (elt) == REAL_CST)
9881 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
9882 inner);
9883 else
9884 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
9885 TREE_INT_CST_HIGH (elt),
9886 inner);
9887 }
9889 /* Initialize remaining elements to 0. */
9890 for (; i < units; ++i)
9891 RTVEC_ELT (v, i) = CONST0_RTX (inner);
9893 return gen_rtx_raw_CONST_VECTOR (mode, v);
9894 }
9896 #include "gt-expr.h"