/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "typeclass.h"
#include "langhooks.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif
#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif
/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
};
static rtx enqueue_insn PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
	PARAMS ((unsigned HOST_WIDE_INT,
		 unsigned int));
static void move_by_pieces_1 PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
				      struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1 PARAMS ((PTR, HOST_WIDE_INT,
				      enum machine_mode));
static void clear_by_pieces PARAMS ((rtx, unsigned HOST_WIDE_INT,
				     unsigned int));
static void store_by_pieces_1 PARAMS ((struct store_by_pieces *,
				       unsigned int));
static void store_by_pieces_2 PARAMS ((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget PARAMS ((rtx));
static int is_zeros_p PARAMS ((tree));
static int mostly_zeros_p PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field PARAMS ((rtx, HOST_WIDE_INT,
				HOST_WIDE_INT, enum machine_mode,
				tree, enum machine_mode, int, tree,
				int));
static rtx var_rtx PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset PARAMS ((tree, tree));
static rtx expand_increment PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag PARAMS ((tree, rtx, enum machine_mode, int));
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */
#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif
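/* Editor's note: a minimal stand-alone sketch (not GCC code) of the
   MOVE_RATIO heuristic above: count how many descending power-of-two
   sized moves LEN bytes would take, as move_by_pieces_ninsns does, and
   open-code the copy only when that count stays below the ratio.  All
   names below are hypothetical.  */
#if 0
#include <stddef.h>

static size_t
count_piece_moves (size_t len, size_t max_piece)
{
  size_t n = 0, size;

  for (size = max_piece; size >= 1; size /= 2)
    {
      n += len / size;    /* moves done with this piece size */
      len %= size;        /* remainder for narrower pieces */
    }
  return n;
}

static int
worth_open_coding (size_t len)
{
  return count_piece_moves (len, sizeof (long)) < 15 /* MOVE_RATIO */;
}
#endif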
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  apply_args_value = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
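/* Editor's note: a stand-alone analogue (not GCC code) of the QUEUED
   mechanism that enqueue_insn and emit_queue implement: record a
   deferred update of a location now, apply the whole chain later.
   All names below are hypothetical.  */
#if 0
struct pending_update
{
  int *var;			/* location whose update is deferred */
  int increment;		/* the queued "body" */
  struct pending_update *next;	/* chain, like pending_chain */
};

static struct pending_update *pending;

static void
enqueue_update (struct pending_update *q, int *var, int inc)
{
  q->var = var;
  q->increment = inc;
  q->next = pending;
  pending = q;			/* newest entry heads the chain */
}

static void
flush_updates (void)		/* analogue of emit_queue */
{
  while (pending)
    {
      *pending->var += pending->increment;
      pending = pending->next;
    }
}
#endif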
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }

  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;

  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	  QUEUED_INSN (p) = body;
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    {
      rtx value, insns;
      rtx libcall;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}
#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
	  libcall = extendsfdf2_libfunc;

	  libcall = extendsfxf2_libfunc;

	  libcall = extendsftf2_libfunc;

	  libcall = truncdfsf2_libfunc;

	  libcall = extenddfxf2_libfunc;

	  libcall = extenddftf2_libfunc;

	  libcall = truncxfsf2_libfunc;

	  libcall = truncxfdf2_libfunc;

	  libcall = trunctfsf2_libfunc;

	  libcall = trunctfdf2_libfunc;

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }
  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}

#ifdef HAVE_extendpqiqi2
      if (HAVE_extendpqiqi2)
	{
	  emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_extendpqiqi2 */
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}

#ifdef HAVE_extendpsisi2
      if (! unsignedp && HAVE_extendpsisi2)
	{
	  emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
      if (unsignedp && HAVE_zero_extendpsisi2)
	{
	  emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_zero_extendpsisi2 */
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}

#ifdef HAVE_extendpdidi2
      if (HAVE_extendpdidi2)
	{
	  emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_extendpdidi2 */
    }
  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }
  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
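/* Editor's note: a stand-alone illustration (not GCC code) of what
   UNSIGNEDP selects in convert_move when widening, using plain C
   conversions on fixed-width types.  */
#if 0
#include <stdint.h>

static int32_t
widen_hi_to_si (int16_t from, int unsignedp)
{
  /* Nonzero UNSIGNEDP means ZERO_EXTEND; zero means SIGN_EXTEND.  */
  return unsignedp ? (int32_t) (uint16_t) from : (int32_t) from;
}

/* widen_hi_to_si (-1, 0) == -1; widen_hi_to_si (-1, 1) == 0xffff.  */
#endif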
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do the
     wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ??? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
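	  /* Editor's worked example: widening the QImode constant 0xff
	     (VAL == -1 on a two's-complement host) with WIDTH == 8: the
	     mask below leaves VAL == 0xff; for a signed value the top
	     bit (0x80) is set, so VAL |= -1 << 8 restores -1, while an
	     unsigned value simply stays 0xff (255).  */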
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
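/* Editor's note: a stand-alone sketch (not GCC code) of the shape of
   move_by_pieces: copy with the widest available piece first, then
   finish with narrower pieces.  Hypothetical host-only code.  */
#if 0
#include <stddef.h>
#include <string.h>

static void
copy_by_pieces (char *to, const char *from, size_t len)
{
  while (len >= sizeof (long))	/* widest piece */
    {
      long tmp;
      memcpy (&tmp, from, sizeof tmp);
      memcpy (to, &tmp, sizeof tmp);
      to += sizeof tmp;
      from += sizeof tmp;
      len -= sizeof tmp;
    }
  while (len--)			/* byte-sized tail */
    *to++ = *from++;
}
#endif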
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
/* Emit code to move a block Y to a block X.  This may be done with
   string-move instructions, with multiple scalar move instructions,
   or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have.
   METHOD describes what kind of copy this is, and what mechanisms may be used.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, method)
     rtx x, y, size;
     enum block_op_methods method;
{
  bool may_use_call;
  rtx retval = 0;
  unsigned int align;

  switch (method)
    {
    case BLOCK_OP_NORMAL:
      may_use_call = true;
      break;

    case BLOCK_OP_CALL_PARM:
      may_use_call = block_move_libcall_safe_for_call_parm ();

      /* Make inhibit_defer_pop nonzero around the library call
	 to force it to pop the arguments right away.  */
      NO_DEFER_POP;
      break;

    case BLOCK_OP_NO_LIBCALL:
      may_use_call = false;
      break;

    default:
      abort ();
    }

  align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));

  if (GET_MODE (x) != BLKmode)
    abort ();
  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* Set MEM_SIZE as appropriate for this block copy.  The main place this
     can be incorrect is coming from __builtin_memcpy.  */
  if (GET_CODE (size) == CONST_INT)
    {
      x = shallow_copy_rtx (x);
      y = shallow_copy_rtx (y);
      set_mem_size (x, size);
      set_mem_size (y, size);
    }

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else if (emit_block_move_via_movstr (x, y, size, align))
    ;
  else if (may_use_call)
    retval = emit_block_move_via_libcall (x, y, size);
  else
    emit_block_move_via_loop (x, y, size, align);

  if (method == BLOCK_OP_CALL_PARM)
    OK_DEFER_POP;

  return retval;
}
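/* Editor's note: a stand-alone sketch (not GCC code) of the decision
   ladder emit_block_move implements; the threshold and fallbacks here
   are hypothetical stand-ins for MOVE_BY_PIECES_P, the movstr pattern,
   and the libcall.  */
#if 0
#include <string.h>

static void
block_move (char *x, const char *y, unsigned long len, int may_use_call)
{
  if (len < 16)			/* stands in for MOVE_BY_PIECES_P */
    while (len--)
      *x++ = *y++;		/* move_by_pieces */
  else if (0 /* a target movstr pattern matched */)
    ;				/* emit_block_move_via_movstr */
  else if (may_use_call)
    memcpy (x, y, len);		/* emit_block_move_via_libcall */
  else
    while (len--)
      *x++ = *y++;		/* emit_block_move_via_loop */
}
#endif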
/* A subroutine of emit_block_move.  Returns true if calling the
   block move libcall will not clobber any parameters which may have
   already been placed on the stack.  */

static bool
block_move_libcall_safe_for_call_parm ()
{
  /* Check to see whether memcpy takes all register arguments.  */
  static enum {
    takes_regs_uninit, takes_regs_no, takes_regs_yes
  } takes_regs = takes_regs_uninit;

  switch (takes_regs)
    {
    case takes_regs_uninit:
      {
	CUMULATIVE_ARGS args_so_far;
	tree fn, arg;

	fn = emit_block_move_libcall_fn (false);
	INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);

	arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
	for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
	    rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
	    if (!tmp || !REG_P (tmp))
	      goto fail_takes_regs;
#ifdef FUNCTION_ARG_PARTIAL_NREGS
	    if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
					    NULL_TREE, 1))
	      goto fail_takes_regs;
#endif
	    FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
	  }
      }
      takes_regs = takes_regs_yes;
      /* FALLTHRU */

    case takes_regs_yes:
      return true;

    fail_takes_regs:
      takes_regs = takes_regs_no;
      /* FALLTHRU */
    case takes_regs_no:
      return false;
    }
}
/* A subroutine of emit_block_move.  Expand a movstr pattern;
   return true if successful.  */

static bool
emit_block_move_via_movstr (x, y, size, align)
     rtx x, y, size;
     unsigned int align;
{
  /* Try the most limited insn first, because there's no point
     including more than one in the machine description unless
     the more limited one has some advantage.  */

  rtx opalign = GEN_INT (align / BITS_PER_UNIT);
  enum machine_mode mode;

  /* Since this is a move insn, we don't care about volatility.  */
  volatile_ok = 1;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum insn_code code = movstr_optab[(int) mode];
      insn_operand_predicate_fn pred;

      if (code != CODE_FOR_nothing
	  /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
	     here because if SIZE is less than the mode mask, as it is
	     returned by the macro, it will definitely be less than the
	     actual mode mask.  */
	  && ((GET_CODE (size) == CONST_INT
	       && ((unsigned HOST_WIDE_INT) INTVAL (size)
		   <= (GET_MODE_MASK (mode) >> 1)))
	      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
	      || (*pred) (x, BLKmode))
	  && ((pred = insn_data[(int) code].operand[1].predicate) == 0
	      || (*pred) (y, BLKmode))
	  && ((pred = insn_data[(int) code].operand[3].predicate) == 0
	      || (*pred) (opalign, VOIDmode)))
	{
	  rtx op2;
	  rtx last = get_last_insn ();
	  rtx pat;

	  op2 = convert_to_mode (mode, size, 1);
	  pred = insn_data[(int) code].operand[2].predicate;
	  if (pred != 0 && ! (*pred) (op2, mode))
	    op2 = copy_to_mode_reg (mode, op2);

	  /* ??? When called via emit_block_move_for_call, it'd be
	     nice if there were some way to inform the backend, so
	     that it doesn't fail the expansion because it thinks
	     emitting the libcall would be more efficient.  */

	  pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	  if (pat)
	    {
	      emit_insn (pat);
	      volatile_ok = 0;
	      return true;
	    }
	  else
	    delete_insns_since (last);
	}
    }

  volatile_ok = 0;
  return false;
}
/* A subroutine of emit_block_move.  Expand a call to memcpy or bcopy.
   Return the return value from memcpy, 0 otherwise.  */

static rtx
emit_block_move_via_libcall (dst, src, size)
     rtx dst, src, size;
{
  tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
  enum machine_mode size_mode;
  rtx retval;

  /* DST, SRC, or SIZE may have been passed through protect_from_queue.

     It is unsafe to save the value generated by protect_from_queue
     and reuse it later.  Consider what happens if emit_queue is
     called before the return value from protect_from_queue is used.

     Expansion of the CALL_EXPR below will call emit_queue before
     we are finished emitting RTL for argument setup.  So if we are
     not careful we could get the wrong value for an argument.

     To avoid this problem we go ahead and emit code to copy X, Y &
     SIZE into new pseudos.  We can then place those new pseudos
     into an RTL_EXPR and use them later, even after a call to
     emit_queue.

     Note this is not strictly needed for library calls since they
     do not call emit_queue before loading their arguments.  However,
     we may need to have library calls call emit_queue in the future
     since failing to do so could cause problems for targets which
     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */

  dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
  src = copy_to_mode_reg (Pmode, XEXP (src, 0));

  if (TARGET_MEM_FUNCTIONS)
    size_mode = TYPE_MODE (sizetype);
  else
    size_mode = TYPE_MODE (unsigned_type_node);
  size = convert_to_mode (size_mode, size, 1);
  size = copy_to_mode_reg (size_mode, size);

  /* It is incorrect to use the libcall calling conventions to call
     memcpy in this context.  This could be a user call to memcpy and
     the user may wish to examine the return value from memcpy.  For
     targets where libcalls and normal calls have different conventions
     for returning pointers, we could end up generating incorrect code.

     For convenience, we generate the call to bcopy this way as well.  */

  dst_tree = make_tree (ptr_type_node, dst);
  src_tree = make_tree (ptr_type_node, src);
  if (TARGET_MEM_FUNCTIONS)
    size_tree = make_tree (sizetype, size);
  else
    size_tree = make_tree (unsigned_type_node, size);

  fn = emit_block_move_libcall_fn (true);
  arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
  if (TARGET_MEM_FUNCTIONS)
    {
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
    }
  else
    {
      arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
      arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
    }

  /* Now we have to build up the CALL_EXPR itself.  */
  call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
		     call_expr, arg_list, NULL_TREE);
  TREE_SIDE_EFFECTS (call_expr) = 1;

  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);

  /* If we are initializing a readonly value, show the above call
     clobbered it.  Otherwise, a load from it may erroneously be
     hoisted from a loop.  */
  if (RTX_UNCHANGING_P (dst))
    emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));

  return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
}
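/* Editor's note: the two conventions the code above can emit, shown
   directly in C (not GCC code): memcpy returns the destination and
   takes (dst, src, n); BSD bcopy returns nothing and takes (src, dst,
   n), which is why the argument lists are built in different orders.  */
#if 0
#include <string.h>
#include <strings.h>

static void *
copy_block (void *dst, const void *src, size_t n, int have_memcpy)
{
  if (have_memcpy)
    return memcpy (dst, src, n);  /* TARGET_MEM_FUNCTIONS case */
  bcopy (src, dst, n);		  /* note the swapped argument order */
  return 0;			  /* no usable return value */
}
#endif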
/* A subroutine of emit_block_move_via_libcall.  Create the tree node
   for the function we use for block copies.  The first time FOR_CALL
   is true, we call assemble_external.  */

static GTY(()) tree block_move_fn;

static tree
emit_block_move_libcall_fn (for_call)
     int for_call;
{
  static bool emitted_extern;
  tree fn = block_move_fn, args;

  if (!fn)
    {
      if (TARGET_MEM_FUNCTIONS)
	{
	  fn = get_identifier ("memcpy");
	  args = build_function_type_list (ptr_type_node, ptr_type_node,
					   const_ptr_type_node, sizetype,
					   NULL_TREE);
	}
      else
	{
	  fn = get_identifier ("bcopy");
	  args = build_function_type_list (void_type_node, const_ptr_type_node,
					   ptr_type_node, unsigned_type_node,
					   NULL_TREE);
	}

      fn = build_decl (FUNCTION_DECL, fn, args);
      DECL_EXTERNAL (fn) = 1;
      TREE_PUBLIC (fn) = 1;
      DECL_ARTIFICIAL (fn) = 1;
      TREE_NOTHROW (fn) = 1;

      block_move_fn = fn;
    }

  if (for_call && !emitted_extern)
    {
      emitted_extern = true;
      make_decl_rtl (fn, NULL);
      assemble_external (fn);
    }

  return fn;
}
/* A subroutine of emit_block_move.  Copy the data via an explicit
   loop.  This is used only when libcalls are forbidden.  */
/* ??? It'd be nice to copy in hunks larger than QImode.  */

static void
emit_block_move_via_loop (x, y, size, align)
     rtx x, y, size;
     unsigned int align ATTRIBUTE_UNUSED;
{
  rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
  enum machine_mode iter_mode;

  iter_mode = GET_MODE (size);
  if (iter_mode == VOIDmode)
    iter_mode = word_mode;

  top_label = gen_label_rtx ();
  cmp_label = gen_label_rtx ();
  iter = gen_reg_rtx (iter_mode);

  emit_move_insn (iter, const0_rtx);

  x_addr = force_operand (XEXP (x, 0), NULL_RTX);
  y_addr = force_operand (XEXP (y, 0), NULL_RTX);
  do_pending_stack_adjust ();

  emit_note (NULL, NOTE_INSN_LOOP_BEG);

  emit_jump (cmp_label);
  emit_label (top_label);

  tmp = convert_modes (Pmode, iter_mode, iter, true);
  x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
  y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
  x = change_address (x, QImode, x_addr);
  y = change_address (y, QImode, y_addr);

  emit_move_insn (x, y);

  tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
			     true, OPTAB_LIB_WIDEN);
  if (tmp != iter)
    emit_move_insn (iter, tmp);

  emit_note (NULL, NOTE_INSN_LOOP_CONT);
  emit_label (cmp_label);

  emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
			   true, top_label);

  emit_note (NULL, NOTE_INSN_LOOP_END);
}
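/* Editor's note: the control flow that emit_block_move_via_loop emits,
   written as equivalent stand-alone C (not GCC code).  */
#if 0
static void
byte_copy_loop (char *x, const char *y, unsigned long size)
{
  unsigned long iter = 0;

  goto cmp;			/* emit_jump (cmp_label) */
 top:				/* top_label */
  x[iter] = y[iter];		/* the QImode move */
  iter++;
 cmp:				/* cmp_label */
  if (iter < size)		/* emit_cmp_and_jump_insns (..., LT, ...) */
    goto top;
}
#endif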
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
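/* Editor's note: a stand-alone illustration (not GCC code) of the
   left-justification above.  On a hypothetical big-endian target with
   4-byte words and SIZE == 3, the register value must be shifted left
   by (UNITS_PER_WORD - SIZE) * BITS_PER_UNIT == 8 bits so that its
   significant bytes land at the low memory addresses of the word.  */
#if 0
#include <stdint.h>
#include <string.h>

static void
store_3_of_4_bytes (unsigned char *mem, uint32_t reg)
{
  uint32_t shifted = reg << 8;	/* left-justify the 3 live bytes */
  unsigned char word[4];

  memcpy (word, &shifted, sizeof word);	/* whole-word store */
  memcpy (mem, word, 3);	/* only 3 bytes belong to the object
				   (big-endian layout assumed) */
}
#endif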
/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
   ORIG, where ORIG is a non-consecutive group of registers represented by
   a PARALLEL.  The clone is identical to the original except in that the
   original set of registers is replaced by a new set of pseudo registers.
   The new set has the same modes as the original set.  */

rtx
gen_group_rtx (orig)
     rtx orig;
{
  int i, length;
  rtx *tmps;

  if (GET_CODE (orig) != PARALLEL)
    abort ();

  length = XVECLEN (orig, 0);
  tmps = (rtx *) alloca (sizeof (rtx) * length);

  /* Skip a NULL entry in first slot.  */
  i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;

  if (i)
    tmps[0] = 0;

  for (; i < length; i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
      rtx offset = XEXP (XVECEXP (orig, 0, i), 1);

      tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
    }

  return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
}
2239 /* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2240 registers represented by a PARALLEL. SSIZE represents the total size of
2241 block SRC in bytes, or -1 if not known. */
2242 /* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2243 the balance will be in what would be the low-order memory addresses, i.e.
2244 left justified for big endian, right justified for little endian. This
2245 happens to be true for the targets currently using this support. If this
2246 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2250 emit_group_load (dst, orig_src, ssize)
2257 if (GET_CODE (dst) != PARALLEL)
2260 /* Check for a NULL entry, used to indicate that the parameter goes
2261 both on the stack and in registers. */
2262 if (XEXP (XVECEXP (dst, 0, 0), 0))
2267 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2269 /* Process the pieces. */
2270 for (i = start; i < XVECLEN (dst, 0); i++)
2272 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2273 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2274 unsigned int bytelen = GET_MODE_SIZE (mode);
2277 /* Handle trailing fragments that run over the size of the struct. */
2278 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2280 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2281 bytelen = ssize - bytepos;
2286 /* If we won't be loading directly from memory, protect the real source
2287 from strange tricks we might play; but make sure that the source can
2288 be loaded directly into the destination. */
2290 if (GET_CODE (orig_src) != MEM
2291 && (!CONSTANT_P (orig_src)
2292 || (GET_MODE (orig_src) != mode
2293 && GET_MODE (orig_src) != VOIDmode)))
2295 if (GET_MODE (orig_src) == VOIDmode)
2296 src = gen_reg_rtx (mode);
2298 src = gen_reg_rtx (GET_MODE (orig_src));
2300 emit_move_insn (src, orig_src);
2303 /* Optimize the access just a bit. */
2304 if (GET_CODE (src) == MEM
2305 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2306 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2307 && bytelen == GET_MODE_SIZE (mode))
2309 tmps[i] = gen_reg_rtx (mode);
2310 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2312 else if (GET_CODE (src) == CONCAT)
2314 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2315 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2317 if ((bytepos == 0 && bytelen == slen0)
2318 || (bytepos != 0 && bytepos + bytelen <= slen))
2320 /* The following assumes that the concatenated objects all
2321 have the same size. In this case, a simple calculation
2322 can be used to determine the object and the bit field
2324 tmps[i] = XEXP (src, bytepos / slen0);
2325 if (! CONSTANT_P (tmps[i])
2326 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2327 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2328 (bytepos % slen0) * BITS_PER_UNIT,
2329 1, NULL_RTX, mode, mode, ssize);
2331 else if (bytepos == 0)
2333 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2334 emit_move_insn (mem, src);
2335 tmps[i] = adjust_address (mem, mode, 0);
2340 else if (CONSTANT_P (src)
2341 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2344 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2345 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2348 if (BYTES_BIG_ENDIAN && shift)
2349 tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2350 tmps[i], 0, OPTAB_WIDEN);
2355 /* Copy the extracted pieces into the proper (probable) hard regs. */
2356 for (i = start; i < XVECLEN (dst, 0); i++)
2357 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2360 /* Emit code to move a block SRC to block DST, where SRC and DST are
2361 non-consecutive groups of registers, each represented by a PARALLEL. */
2364 emit_group_move (dst, src)
2369 if (GET_CODE (src) != PARALLEL
2370 || GET_CODE (dst) != PARALLEL
2371 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2374 /* Skip first entry if NULL. */
2375 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2376 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2377 XEXP (XVECEXP (src, 0, i), 0));
2380 /* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2381 registers represented by a PARALLEL. SSIZE represents the total size of
2382 block DST, or -1 if not known. */
2385 emit_group_store (orig_dst, src, ssize)
2392 if (GET_CODE (src) != PARALLEL)
2395 /* Check for a NULL entry, used to indicate that the parameter goes
2396 both on the stack and in registers. */
2397 if (XEXP (XVECEXP (src, 0, 0), 0))
2402 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2404 /* Copy the (probable) hard regs into pseudos. */
2405 for (i = start; i < XVECLEN (src, 0); i++)
2407 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2408 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2409 emit_move_insn (tmps[i], reg);
2413 /* If we won't be storing directly into memory, protect the real destination
2414 from strange tricks we might play. */
2416 if (GET_CODE (dst) == PARALLEL)
2420 /* We can get a PARALLEL dst if there is a conditional expression in
2421 a return statement. In that case, the dst and src are the same,
2422 so no action is necessary. */
2423 if (rtx_equal_p (dst, src))
2426 /* It is unclear if we can ever reach here, but we may as well handle
2427 it. Allocate a temporary, and split this into a store/load to/from the temporary. */
2430 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2431 emit_group_store (temp, src, ssize);
2432 emit_group_load (dst, temp, ssize);
2435 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2437 dst = gen_reg_rtx (GET_MODE (orig_dst));
2438 /* Make life a bit easier for combine. */
2439 emit_move_insn (dst, const0_rtx);
2442 /* Process the pieces. */
2443 for (i = start; i < XVECLEN (src, 0); i++)
2445 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2446 enum machine_mode mode = GET_MODE (tmps[i]);
2447 unsigned int bytelen = GET_MODE_SIZE (mode);
2450 /* Handle trailing fragments that run over the size of the struct. */
2451 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2453 if (BYTES_BIG_ENDIAN)
2455 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2456 tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2457 tmps[i], 0, OPTAB_WIDEN);
2459 bytelen = ssize - bytepos;
2462 if (GET_CODE (dst) == CONCAT)
2464 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2465 dest = XEXP (dst, 0);
2466 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2468 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2469 dest = XEXP (dst, 1);
2475 /* Optimize the access just a bit. */
2476 if (GET_CODE (dest) == MEM
2477 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2478 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2479 && bytelen == GET_MODE_SIZE (mode))
2480 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2482 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2483 mode, tmps[i], ssize);
2488 /* Copy from the pseudo into the (probable) hard reg. */
2489 if (GET_CODE (dst) == REG)
2490 emit_move_insn (orig_dst, dst);
2493 /* Generate code to copy a BLKmode object of TYPE out of a
2494 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2495 is null, a stack temporary is created. TGTBLK is returned.
2497 The primary purpose of this routine is to handle functions
2498 that return BLKmode structures in registers. Some machines
2499 (the PA for example) want to return all small structures
2500 in registers regardless of the structure's alignment. */
2503 copy_blkmode_from_reg (tgtblk, srcreg, type)
2508 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2509 rtx src = NULL, dst = NULL;
2510 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2511 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2515 tgtblk = assign_temp (build_qualified_type (type,
2517 | TYPE_QUAL_CONST)),
2519 preserve_temp_slots (tgtblk);
2522 /* This code assumes srcreg is at least a full word. If it isn't, copy it
2523 into a new pseudo which is a full word. */
2525 if (GET_MODE (srcreg) != BLKmode
2526 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2527 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2529 /* Structures whose size is not a multiple of a word are aligned
2530 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2531 machine, this means we must skip the empty high order bytes when
2532 calculating the bit offset. */
2533 if (BYTES_BIG_ENDIAN
2534 && bytes % UNITS_PER_WORD)
2535 big_endian_correction
2536 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
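  /* For example (hypothetical 32-bit target): with BITS_PER_WORD == 32,
     UNITS_PER_WORD == 4 and a 6-byte structure,
     big_endian_correction = 32 - (6 % 4) * 8 = 16, so the copy below
     starts reading the first source word 16 bits in, skipping its
     empty high-order bits.  */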
2538 /* Copy the structure BITSIZE bits at a time.
2540 We could probably emit more efficient code for machines which do not use
2541 strict alignment, but it doesn't seem worth the effort at the current time. */
2543 for (bitpos = 0, xbitpos = big_endian_correction;
2544 bitpos < bytes * BITS_PER_UNIT;
2545 bitpos += bitsize, xbitpos += bitsize)
2547 /* We need a new source operand each time xbitpos is on a
2548 word boundary and when xbitpos == big_endian_correction
2549 (the first time through). */
2550 if (xbitpos % BITS_PER_WORD == 0
2551 || xbitpos == big_endian_correction)
2552 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2555 /* We need a new destination operand each time bitpos is on a word boundary. */
2557 if (bitpos % BITS_PER_WORD == 0)
2558 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2560 /* Use xbitpos for the source extraction (right justified) and
2561 bitpos for the destination store (left justified). */
2562 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2563 extract_bit_field (src, bitsize,
2564 xbitpos % BITS_PER_WORD, 1,
2565 NULL_RTX, word_mode, word_mode,
2573 /* Add a USE expression for REG to the (possibly empty) list pointed
2574 to by CALL_FUSAGE. REG must denote a hard register. */
2577 use_reg (call_fusage, reg)
2578 rtx *call_fusage, reg;
2580 if (GET_CODE (reg) != REG
2581 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2585 = gen_rtx_EXPR_LIST (VOIDmode,
2586 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2589 /* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2590 starting at REGNO. All of these registers must be hard registers. */
2593 use_regs (call_fusage, regno, nregs)
2600 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2603 for (i = 0; i < nregs; i++)
2604 use_reg (call_fusage, regno_reg_rtx[regno + i]);
2607 /* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2608 PARALLEL REGS. This is for calls that pass values in multiple
2609 non-contiguous locations. The Irix 6 ABI has examples of this. */
2612 use_group_regs (call_fusage, regs)
2618 for (i = 0; i < XVECLEN (regs, 0); i++)
2620 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2622 /* A NULL entry means the parameter goes both on the stack and in
2623 registers. This can also be a MEM for targets that pass values
2624 partially on the stack and partially in registers. */
2625 if (reg != 0 && GET_CODE (reg) == REG)
2626 use_reg (call_fusage, reg);
2631 /* Determine whether the LEN bytes generated by CONSTFUN can be
2632 stored to memory using several move instructions. CONSTFUNDATA is
2633 a pointer which will be passed as argument in every CONSTFUN call.
2634 ALIGN is maximum alignment we can assume. Return nonzero if a
2635 call to store_by_pieces should succeed. */
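/* The CONSTFUN contract, as a hypothetical sketch (clear_by_pieces_1
   below is the simplest real instance): it must return an rtx for the
   MODE-sized piece of the data starting OFFSET bytes in, e.g.

       static rtx
       all_ones_piece (data, offset, mode)
            PTR data ATTRIBUTE_UNUSED;
            HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
            enum machine_mode mode ATTRIBUTE_UNUSED;
       {
         return GEN_INT (-1);
       }

   all_ones_piece is made up for illustration; GEN_INT (-1) is a valid
   CONST_INT for any integer mode since CONST_INTs are sign-extended.  */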
2638 can_store_by_pieces (len, constfun, constfundata, align)
2639 unsigned HOST_WIDE_INT len;
2640 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2644 unsigned HOST_WIDE_INT max_size, l;
2645 HOST_WIDE_INT offset = 0;
2646 enum machine_mode mode, tmode;
2647 enum insn_code icode;
2651 if (! MOVE_BY_PIECES_P (len, align))
2654 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2655 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2656 align = MOVE_MAX * BITS_PER_UNIT;
2658 /* We would first store what we can in the largest integer mode, then go to
2659 successively smaller modes. */
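  /* E.g. for LEN == 7 on a target whose widest usable integer piece is
     4 bytes (hypothetical numbers), the dry run below would account for
     one SImode piece, then one HImode piece, then one QImode piece:
     4 + 2 + 1 == 7 bytes.  */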
2662 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2667 max_size = STORE_MAX_PIECES + 1;
2668 while (max_size > 1)
2670 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2671 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2672 if (GET_MODE_SIZE (tmode) < max_size)
2675 if (mode == VOIDmode)
2678 icode = mov_optab->handlers[(int) mode].insn_code;
2679 if (icode != CODE_FOR_nothing
2680 && align >= GET_MODE_ALIGNMENT (mode))
2682 unsigned int size = GET_MODE_SIZE (mode);
2689 cst = (*constfun) (constfundata, offset, mode);
2690 if (!LEGITIMATE_CONSTANT_P (cst))
2700 max_size = GET_MODE_SIZE (mode);
2703 /* The code above should have handled everything. */
2711 /* Generate several move instructions to store LEN bytes generated by
2712 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2713 pointer which will be passed as argument in every CONSTFUN call.
2714 ALIGN is maximum alignment we can assume. */
2717 store_by_pieces (to, len, constfun, constfundata, align)
2719 unsigned HOST_WIDE_INT len;
2720 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2724 struct store_by_pieces data;
2726 if (! MOVE_BY_PIECES_P (len, align))
2728 to = protect_from_queue (to, 1);
2729 data.constfun = constfun;
2730 data.constfundata = constfundata;
2733 store_by_pieces_1 (&data, align);
2736 /* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2737 rtx with BLKmode). The caller must pass TO through protect_from_queue
2738 before calling. ALIGN is maximum alignment we can assume. */
2741 clear_by_pieces (to, len, align)
2743 unsigned HOST_WIDE_INT len;
2746 struct store_by_pieces data;
2748 data.constfun = clear_by_pieces_1;
2749 data.constfundata = NULL;
2752 store_by_pieces_1 (&data, align);
2755 /* Callback routine for clear_by_pieces.
2756 Return const0_rtx unconditionally. */
2759 clear_by_pieces_1 (data, offset, mode)
2760 PTR data ATTRIBUTE_UNUSED;
2761 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2762 enum machine_mode mode ATTRIBUTE_UNUSED;
2767 /* Subroutine of clear_by_pieces and store_by_pieces.
2768 Generate several move instructions to store LEN bytes of block TO. (A MEM
2769 rtx with BLKmode). The caller must pass TO through protect_from_queue
2770 before calling. ALIGN is maximum alignment we can assume. */
2773 store_by_pieces_1 (data, align)
2774 struct store_by_pieces *data;
2777 rtx to_addr = XEXP (data->to, 0);
2778 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2779 enum machine_mode mode = VOIDmode, tmode;
2780 enum insn_code icode;
2783 data->to_addr = to_addr;
2785 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2786 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2788 data->explicit_inc_to = 0;
2790 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2792 data->offset = data->len;
2794 /* If storing requires more than two move insns,
2795 copy addresses to registers (to make displacements shorter)
2796 and use post-increment if available. */
2797 if (!data->autinc_to
2798 && move_by_pieces_ninsns (data->len, align) > 2)
2800 /* Determine the main mode we'll be using. */
2801 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2802 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2803 if (GET_MODE_SIZE (tmode) < max_size)
2806 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2808 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2809 data->autinc_to = 1;
2810 data->explicit_inc_to = -1;
2813 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2814 && ! data->autinc_to)
2816 data->to_addr = copy_addr_to_reg (to_addr);
2817 data->autinc_to = 1;
2818 data->explicit_inc_to = 1;
2821 if (!data->autinc_to && CONSTANT_P (to_addr))
2822 data->to_addr = copy_addr_to_reg (to_addr);
2825 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2826 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2827 align = MOVE_MAX * BITS_PER_UNIT;
2829 /* First store what we can in the largest integer mode, then go to
2830 successively smaller modes. */
2832 while (max_size > 1)
2834 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2835 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2836 if (GET_MODE_SIZE (tmode) < max_size)
2839 if (mode == VOIDmode)
2842 icode = mov_optab->handlers[(int) mode].insn_code;
2843 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2844 store_by_pieces_2 (GEN_FCN (icode), mode, data);
2846 max_size = GET_MODE_SIZE (mode);
2849 /* The code above should have handled everything. */
2854 /* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
2855 with move instructions for mode MODE. GENFUN is the gen_... function
2856 to make a move insn for that mode. DATA has all the other info. */
2859 store_by_pieces_2 (genfun, mode, data)
2860 rtx (*genfun) PARAMS ((rtx, ...));
2861 enum machine_mode mode;
2862 struct store_by_pieces *data;
2864 unsigned int size = GET_MODE_SIZE (mode);
2867 while (data->len >= size)
2870 data->offset -= size;
2872 if (data->autinc_to)
2873 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2876 to1 = adjust_address (data->to, mode, data->offset);
2878 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2879 emit_insn (gen_add2_insn (data->to_addr,
2880 GEN_INT (-(HOST_WIDE_INT) size)));
2882 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2883 emit_insn ((*genfun) (to1, cst));
2885 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2886 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2888 if (! data->reverse)
2889 data->offset += size;
2895 /* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
2896 its length in bytes. */
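/* In outline: a non-BLKmode OBJECT whose size equals its mode's size
   is cleared with a single move of CONST0_RTX; a constant SIZE that
   CLEAR_BY_PIECES_P approves goes through clear_by_pieces; otherwise
   we try the target's clrstr pattern, and finally fall back to a
   library call (memset or bzero).  */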
2899 clear_storage (object, size)
2904 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2905 : GET_MODE_ALIGNMENT (GET_MODE (object)));
2907 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2908 just move a zero. Otherwise, do this a piece at a time. */
2909 if (GET_MODE (object) != BLKmode
2910 && GET_CODE (size) == CONST_INT
2911 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
2912 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2915 object = protect_from_queue (object, 1);
2916 size = protect_from_queue (size, 0);
2918 if (GET_CODE (size) == CONST_INT
2919 && CLEAR_BY_PIECES_P (INTVAL (size), align))
2920 clear_by_pieces (object, INTVAL (size), align);
2921 else if (clear_storage_via_clrstr (object, size, align))
2924 retval = clear_storage_via_libcall (object, size);
2930 /* A subroutine of clear_storage. Expand a clrstr pattern;
2931 return true if successful. */
2934 clear_storage_via_clrstr (object, size, align)
2938 /* Try the most limited insn first, because there's no point
2939 including more than one in the machine description unless
2940 the more limited one has some advantage. */
2942 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2943 enum machine_mode mode;
2945 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2946 mode = GET_MODE_WIDER_MODE (mode))
2948 enum insn_code code = clrstr_optab[(int) mode];
2949 insn_operand_predicate_fn pred;
2951 if (code != CODE_FOR_nothing
2952 /* We don't need MODE to be narrower than
2953 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2954 the mode mask, as it is returned by the macro, it will
2955 definitely be less than the actual mode mask. */
2956 && ((GET_CODE (size) == CONST_INT
2957 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2958 <= (GET_MODE_MASK (mode) >> 1)))
2959 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2960 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2961 || (*pred) (object, BLKmode))
2962 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2963 || (*pred) (opalign, VOIDmode)))
2966 rtx last = get_last_insn ();
2969 op1 = convert_to_mode (mode, size, 1);
2970 pred = insn_data[(int) code].operand[1].predicate;
2971 if (pred != 0 && ! (*pred) (op1, mode))
2972 op1 = copy_to_mode_reg (mode, op1);
2974 pat = GEN_FCN ((int) code) (object, op1, opalign);
2981 delete_insns_since (last);
2988 /* A subroutine of clear_storage. Expand a call to memset or bzero.
2989 Return the return value of memset, 0 otherwise. */
2992 clear_storage_via_libcall (object, size)
2995 tree call_expr, arg_list, fn, object_tree, size_tree;
2996 enum machine_mode size_mode;
2999 /* OBJECT or SIZE may have been passed through protect_from_queue.
3001 It is unsafe to save the value generated by protect_from_queue
3002 and reuse it later. Consider what happens if emit_queue is
3003 called before the return value from protect_from_queue is used.
3005 Expansion of the CALL_EXPR below will call emit_queue before
3006 we are finished emitting RTL for argument setup. So if we are
3007 not careful we could get the wrong value for an argument.
3009 To avoid this problem we go ahead and emit code to copy OBJECT
3010 and SIZE into new pseudos. We can then place those new pseudos
3011 into an RTL_EXPR and use them later, even after a call to
3014 Note this is not strictly needed for library calls since they
3015 do not call emit_queue before loading their arguments. However,
3016 we may need to have library calls call emit_queue in the future
3017 since failing to do so could cause problems for targets which
3018 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
3020 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
3022 if (TARGET_MEM_FUNCTIONS)
3023 size_mode = TYPE_MODE (sizetype);
3025 size_mode = TYPE_MODE (unsigned_type_node);
3026 size = convert_to_mode (size_mode, size, 1);
3027 size = copy_to_mode_reg (size_mode, size);
3029 /* It is incorrect to use the libcall calling conventions to call
3030 memset in this context. This could be a user call to memset and
3031 the user may wish to examine the return value from memset. For
3032 targets where libcalls and normal calls have different conventions
3033 for returning pointers, we could end up generating incorrect code.
3035 For convenience, we generate the call to bzero this way as well. */
3037 object_tree = make_tree (ptr_type_node, object);
3038 if (TARGET_MEM_FUNCTIONS)
3039 size_tree = make_tree (sizetype, size);
3041 size_tree = make_tree (unsigned_type_node, size);
3043 fn = clear_storage_libcall_fn (true);
3044 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3045 if (TARGET_MEM_FUNCTIONS)
3046 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3047 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3049 /* Now we have to build up the CALL_EXPR itself. */
3050 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3051 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3052 call_expr, arg_list, NULL_TREE);
3053 TREE_SIDE_EFFECTS (call_expr) = 1;
3055 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3057 /* If we are initializing a readonly value, show the above call
3058 clobbered it. Otherwise, a load from it may erroneously be
3059 hoisted from a loop. */
3060 if (RTX_UNCHANGING_P (object))
3061 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3063 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3066 /* A subroutine of clear_storage_via_libcall. Create the tree node
3067 for the function we use for block clears. The first time FOR_CALL
3068 is true, we call assemble_external. */
3070 static GTY(()) tree block_clear_fn;
3073 clear_storage_libcall_fn (for_call)
3076 static bool emitted_extern;
3077 tree fn = block_clear_fn, args;
3081 if (TARGET_MEM_FUNCTIONS)
3083 fn = get_identifier ("memset");
3084 args = build_function_type_list (ptr_type_node, ptr_type_node,
3085 integer_type_node, sizetype,
3090 fn = get_identifier ("bzero");
3091 args = build_function_type_list (void_type_node, ptr_type_node,
3092 unsigned_type_node, NULL_TREE);
3095 fn = build_decl (FUNCTION_DECL, fn, args);
3096 DECL_EXTERNAL (fn) = 1;
3097 TREE_PUBLIC (fn) = 1;
3098 DECL_ARTIFICIAL (fn) = 1;
3099 TREE_NOTHROW (fn) = 1;
3101 block_clear_fn = fn;
3104 if (for_call && !emitted_extern)
3106 emitted_extern = true;
3107 make_decl_rtl (fn, NULL);
3108 assemble_external (fn);
3114 /* Generate code to copy Y into X.
3115 Both Y and X must have the same mode, except that
3116 Y can be a constant with VOIDmode.
3117 This mode cannot be BLKmode; use emit_block_move for that.
3119 Return the last instruction emitted. */
3122 emit_move_insn (x, y)
3125 enum machine_mode mode = GET_MODE (x);
3126 rtx y_cst = NULL_RTX;
3129 x = protect_from_queue (x, 1);
3130 y = protect_from_queue (y, 0);
3132 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3135 /* Never force constant_p_rtx to memory. */
3136 if (GET_CODE (y) == CONSTANT_P_RTX)
3138 else if (CONSTANT_P (y))
3141 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3142 && (last_insn = compress_float_constant (x, y)))
3145 if (!LEGITIMATE_CONSTANT_P (y))
3148 y = force_const_mem (mode, y);
3152 /* If X or Y are memory references, verify that their addresses are valid for the machine. */
3154 if (GET_CODE (x) == MEM
3155 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3156 && ! push_operand (x, GET_MODE (x)))
3158 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3159 x = validize_mem (x);
3161 if (GET_CODE (y) == MEM
3162 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3164 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3165 y = validize_mem (y);
3167 if (mode == BLKmode)
3170 last_insn = emit_move_insn_1 (x, y);
3172 if (y_cst && GET_CODE (x) == REG)
3173 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3178 /* Low level part of emit_move_insn.
3179 Called just like emit_move_insn, but assumes X and Y
3180 are basically valid. */
3183 emit_move_insn_1 (x, y)
3186 enum machine_mode mode = GET_MODE (x);
3187 enum machine_mode submode;
3188 enum mode_class class = GET_MODE_CLASS (mode);
3190 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3193 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3195 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3197 /* Expand complex moves by moving real part and imag part, if possible. */
3198 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3199 && BLKmode != (submode = GET_MODE_INNER (mode))
3200 && (mov_optab->handlers[(int) submode].insn_code
3201 != CODE_FOR_nothing))
3203 /* Don't split destination if it is a stack push. */
3204 int stack = push_operand (x, GET_MODE (x));
3206 #ifdef PUSH_ROUNDING
3207 /* In case we output to the stack, but the size is smaller than the
3208 machine can push exactly, we need to use move instructions. */
3210 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3211 != GET_MODE_SIZE (submode)))
3214 HOST_WIDE_INT offset1, offset2;
3216 /* Do not use anti_adjust_stack, since we don't want to update
3217 stack_pointer_delta. */
3218 temp = expand_binop (Pmode,
3219 #ifdef STACK_GROWS_DOWNWARD
3227 (GET_MODE_SIZE (GET_MODE (x)))),
3228 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3230 if (temp != stack_pointer_rtx)
3231 emit_move_insn (stack_pointer_rtx, temp);
3233 #ifdef STACK_GROWS_DOWNWARD
3235 offset2 = GET_MODE_SIZE (submode);
3237 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3238 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3239 + GET_MODE_SIZE (submode));
3242 emit_move_insn (change_address (x, submode,
3243 gen_rtx_PLUS (Pmode,
3245 GEN_INT (offset1))),
3246 gen_realpart (submode, y));
3247 emit_move_insn (change_address (x, submode,
3248 gen_rtx_PLUS (Pmode,
3250 GEN_INT (offset2))),
3251 gen_imagpart (submode, y));
3255 /* If this is a stack push, push the highpart first, so it
3256 will be in the argument order.
3258 In that case, change_address is used only to convert
3259 the mode, not to change the address. */
3262 /* Note that the real part always precedes the imag part in memory
3263 regardless of machine's endianness. */
3264 #ifdef STACK_GROWS_DOWNWARD
3265 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3266 (gen_rtx_MEM (submode, XEXP (x, 0)),
3267 gen_imagpart (submode, y)));
3268 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3269 (gen_rtx_MEM (submode, XEXP (x, 0)),
3270 gen_realpart (submode, y)));
3272 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3273 (gen_rtx_MEM (submode, XEXP (x, 0)),
3274 gen_realpart (submode, y)));
3275 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3276 (gen_rtx_MEM (submode, XEXP (x, 0)),
3277 gen_imagpart (submode, y)));
3282 rtx realpart_x, realpart_y;
3283 rtx imagpart_x, imagpart_y;
3285 /* If this is a complex value with each part being smaller than a
3286 word, the usual calling sequence will likely pack the pieces into
3287 a single register. Unfortunately, SUBREG of hard registers only
3288 deals in terms of words, so we have a problem converting input
3289 arguments to the CONCAT of two registers that is used elsewhere
3290 for complex values. If this is before reload, we can copy it into
3291 memory and reload. FIXME, we should see about using extract and
3292 insert on integer registers, but complex short and complex char
3293 variables should be rarely used. */
3294 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3295 && (reload_in_progress | reload_completed) == 0)
3298 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3300 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3302 if (packed_dest_p || packed_src_p)
3304 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3305 ? MODE_FLOAT : MODE_INT);
3307 enum machine_mode reg_mode
3308 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3310 if (reg_mode != BLKmode)
3312 rtx mem = assign_stack_temp (reg_mode,
3313 GET_MODE_SIZE (mode), 0);
3314 rtx cmem = adjust_address (mem, mode, 0);
3317 = N_("function using short complex types cannot be inline");
3321 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
3323 emit_move_insn_1 (cmem, y);
3324 return emit_move_insn_1 (sreg, mem);
3328 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
3330 emit_move_insn_1 (mem, sreg);
3331 return emit_move_insn_1 (x, cmem);
3337 realpart_x = gen_realpart (submode, x);
3338 realpart_y = gen_realpart (submode, y);
3339 imagpart_x = gen_imagpart (submode, x);
3340 imagpart_y = gen_imagpart (submode, y);
3342 /* Show the output dies here. This is necessary for SUBREGs
3343 of pseudos since we cannot track their lifetimes correctly;
3344 hard regs shouldn't appear here except as return values.
3345 We never want to emit such a clobber after reload. */
3347 && ! (reload_in_progress || reload_completed)
3348 && (GET_CODE (realpart_x) == SUBREG
3349 || GET_CODE (imagpart_x) == SUBREG))
3350 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3352 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3353 (realpart_x, realpart_y));
3354 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3355 (imagpart_x, imagpart_y));
3358 return get_last_insn ();
3361 /* This will handle any multi-word or full-word mode that lacks a move_insn
3362 pattern. However, you will get better code if you define such patterns,
3363 even if they must turn into multiple assembler instructions. */
3364 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3371 #ifdef PUSH_ROUNDING
3373 /* If X is a push on the stack, do the push now and replace
3374 X with a reference to the stack pointer. */
3375 if (push_operand (x, GET_MODE (x)))
3380 /* Do not use anti_adjust_stack, since we don't want to update
3381 stack_pointer_delta. */
3382 temp = expand_binop (Pmode,
3383 #ifdef STACK_GROWS_DOWNWARD
3391 (GET_MODE_SIZE (GET_MODE (x)))),
3392 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3394 if (temp != stack_pointer_rtx)
3395 emit_move_insn (stack_pointer_rtx, temp);
3397 code = GET_CODE (XEXP (x, 0));
3399 /* Just hope that small offsets off SP are OK. */
3400 if (code == POST_INC)
3401 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3402 GEN_INT (-((HOST_WIDE_INT)
3403 GET_MODE_SIZE (GET_MODE (x)))));
3404 else if (code == POST_DEC)
3405 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3406 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3408 temp = stack_pointer_rtx;
3410 x = change_address (x, VOIDmode, temp);
3414 /* If we are in reload, see if either operand is a MEM whose address
3415 is scheduled for replacement. */
3416 if (reload_in_progress && GET_CODE (x) == MEM
3417 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
3418 x = replace_equiv_address_nv (x, inner);
3419 if (reload_in_progress && GET_CODE (y) == MEM
3420 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
3421 y = replace_equiv_address_nv (y, inner);
3427 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3430 rtx xpart = operand_subword (x, i, 1, mode);
3431 rtx ypart = operand_subword (y, i, 1, mode);
3433 /* If we can't get a part of Y, put Y into memory if it is a
3434 constant. Otherwise, force it into a register. If we still
3435 can't get a part of Y, abort. */
3436 if (ypart == 0 && CONSTANT_P (y))
3438 y = force_const_mem (mode, y);
3439 ypart = operand_subword (y, i, 1, mode);
3441 else if (ypart == 0)
3442 ypart = operand_subword_force (y, i, mode);
3444 if (xpart == 0 || ypart == 0)
3447 need_clobber |= (GET_CODE (xpart) == SUBREG);
3449 last_insn = emit_move_insn (xpart, ypart);
3455 /* Show the output dies here. This is necessary for SUBREGs
3456 of pseudos since we cannot track their lifetimes correctly;
3457 hard regs shouldn't appear here except as return values.
3458 We never want to emit such a clobber after reload. */
3460 && ! (reload_in_progress || reload_completed)
3461 && need_clobber != 0)
3462 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
3472 /* If Y is representable exactly in a narrower mode, and the target can
3473 perform the extension directly from constant or memory, then emit the
3474 move as an extension. */
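/* For example (hypothetical target): if Y is the DFmode constant 1.0,
   which truncates exactly to SFmode, and the target's extendsfdf2
   pattern accepts the SFmode constant (or a memory copy of it), a
   single float_extend insn replaces loading the full DFmode image.  */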
3477 compress_float_constant (x, y)
3480 enum machine_mode dstmode = GET_MODE (x);
3481 enum machine_mode orig_srcmode = GET_MODE (y);
3482 enum machine_mode srcmode;
3485 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3487 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3488 srcmode != orig_srcmode;
3489 srcmode = GET_MODE_WIDER_MODE (srcmode))
3492 rtx trunc_y, last_insn;
3494 /* Skip if the target can't extend this way. */
3495 ic = can_extend_p (dstmode, srcmode, 0);
3496 if (ic == CODE_FOR_nothing)
3499 /* Skip if the narrowed value isn't exact. */
3500 if (! exact_real_truncate (srcmode, &r))
3503 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3505 if (LEGITIMATE_CONSTANT_P (trunc_y))
3507 /* Skip if the target needs extra instructions to perform the extension. */
3509 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3512 else if (float_extend_from_mem[dstmode][srcmode])
3513 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3517 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3518 last_insn = get_last_insn ();
3520 if (GET_CODE (x) == REG)
3521 REG_NOTES (last_insn)
3522 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3530 /* Pushing data onto the stack. */
3532 /* Push a block of length SIZE (perhaps variable)
3533 and return an rtx to address the beginning of the block.
3534 Note that it is not possible for the value returned to be a QUEUED.
3535 The value may be virtual_outgoing_args_rtx.
3537 EXTRA is the number of bytes of padding to push in addition to SIZE.
3538 BELOW nonzero means this padding comes at low addresses;
3539 otherwise, the padding comes at high addresses. */
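/* A worked example (hypothetical, stack growing downward): for SIZE
   (const_int 16) and EXTRA == 0, the code below emits
   anti_adjust_stack for 16 bytes and returns an address equivalent to
   virtual_outgoing_args_rtx - 16, the low end of the new block.  */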
3542 push_block (size, extra, below)
3548 size = convert_modes (Pmode, ptr_mode, size, 1);
3549 if (CONSTANT_P (size))
3550 anti_adjust_stack (plus_constant (size, extra));
3551 else if (GET_CODE (size) == REG && extra == 0)
3552 anti_adjust_stack (size);
3555 temp = copy_to_mode_reg (Pmode, size);
3557 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3558 temp, 0, OPTAB_LIB_WIDEN);
3559 anti_adjust_stack (temp);
3562 #ifndef STACK_GROWS_DOWNWARD
3568 temp = virtual_outgoing_args_rtx;
3569 if (extra != 0 && below)
3570 temp = plus_constant (temp, extra);
3574 if (GET_CODE (size) == CONST_INT)
3575 temp = plus_constant (virtual_outgoing_args_rtx,
3576 -INTVAL (size) - (below ? 0 : extra));
3577 else if (extra != 0 && !below)
3578 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3579 negate_rtx (Pmode, plus_constant (size, extra)));
3581 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3582 negate_rtx (Pmode, size));
3585 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3588 #ifdef PUSH_ROUNDING
3590 /* Emit single push insn. */
3593 emit_single_push_insn (mode, x, type)
3595 enum machine_mode mode;
3599 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
3601 enum insn_code icode;
3602 insn_operand_predicate_fn pred;
3604 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3605 /* If there is a push pattern, use it. Otherwise try the old way of
3606 throwing a MEM representing a push operation at the move expander. */
3607 icode = push_optab->handlers[(int) mode].insn_code;
3608 if (icode != CODE_FOR_nothing)
3610 if (((pred = insn_data[(int) icode].operand[0].predicate)
3611 && !((*pred) (x, mode))))
3612 x = force_reg (mode, x);
3613 emit_insn (GEN_FCN (icode) (x));
3616 if (GET_MODE_SIZE (mode) == rounded_size)
3617 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3620 #ifdef STACK_GROWS_DOWNWARD
3621 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3622 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3624 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3625 GEN_INT (rounded_size));
3627 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
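      /* E.g. (hypothetical target where PUSH_ROUNDING rounds a 2-byte
         HImode push up to a 4-byte slot, stack growing downward):
         dest_addr is now
             (pre_modify (reg sp) (plus (reg sp) (const_int -4)))
         so the stack pointer moves by the rounded size even though the
         HImode store itself writes only 2 bytes.  */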
3630 dest = gen_rtx_MEM (mode, dest_addr);
3634 set_mem_attributes (dest, type, 1);
3636 if (flag_optimize_sibling_calls)
3637 /* Function incoming arguments may overlap with sibling call
3638 outgoing arguments and we cannot allow reordering of reads
3639 from function arguments with stores to outgoing arguments
3640 of sibling calls. */
3641 set_mem_alias_set (dest, 0);
3643 emit_move_insn (dest, x);
3647 /* Generate code to push X onto the stack, assuming it has mode MODE and type TYPE.
3649 MODE is redundant except when X is a CONST_INT (since they don't carry mode info).
3651 SIZE is an rtx for the size of data to be copied (in bytes),
3652 needed only if X is BLKmode.
3654 ALIGN (in bits) is maximum alignment we can assume.
3656 If PARTIAL and REG are both nonzero, then copy that many of the first
3657 words of X into registers starting with REG, and push the rest of X.
3658 The amount of space pushed is decreased by PARTIAL words,
3659 rounded *down* to a multiple of PARM_BOUNDARY.
3660 REG must be a hard register in this case.
3661 If REG is zero but PARTIAL is not, take all other actions for an
3662 argument partially in registers, but do not actually load any registers.
3665 EXTRA is the amount in bytes of extra space to leave next to this arg.
3666 This is ignored if an argument block has already been allocated.
3668 On a machine that lacks real push insns, ARGS_ADDR is the address of
3669 the bottom of the argument block for this call. We use indexing off there
3670 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3671 argument block has not been preallocated.
3673 ARGS_SO_FAR is the size of args previously pushed for this call.
3675 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3676 for arguments passed in registers. If nonzero, it will be the number
3677 of bytes required. */
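/* A worked example of PARTIAL (hypothetical numbers): pushing a
   12-byte BLKmode argument with PARTIAL == 2 and 4-byte words gives
   USED == 8, so only the final 4 bytes are copied to the stack; if
   REG_PARM_STACK_SPACE is nonzero, SKIP == 8 places that copy past the
   stack space reserved for the register part.  */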
3680 emit_push_insn (x, mode, type, size, align, partial, reg, extra,
3681 args_addr, args_so_far, reg_parm_stack_space,
3684 enum machine_mode mode;
3693 int reg_parm_stack_space;
3697 enum direction stack_direction
3698 #ifdef STACK_GROWS_DOWNWARD
3704 /* Decide where to pad the argument: `downward' for below,
3705 `upward' for above, or `none' for don't pad it.
3706 Default is below for small data on big-endian machines; else above. */
3707 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3709 /* Invert direction if stack is post-decrement.
3711 if (STACK_PUSH_CODE == POST_DEC)
3712 if (where_pad != none)
3713 where_pad = (where_pad == downward ? upward : downward);
3715 xinner = x = protect_from_queue (x, 0);
3717 if (mode == BLKmode)
3719 /* Copy a block into the stack, entirely or partially. */
3722 int used = partial * UNITS_PER_WORD;
3723 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3731 /* USED is now the # of bytes we need not copy to the stack
3732 because registers will take care of them. */
3735 xinner = adjust_address (xinner, BLKmode, used);
3737 /* If the partial register-part of the arg counts in its stack size,
3738 skip the part of stack space corresponding to the registers.
3739 Otherwise, start copying to the beginning of the stack space,
3740 by setting SKIP to 0. */
3741 skip = (reg_parm_stack_space == 0) ? 0 : used;
3743 #ifdef PUSH_ROUNDING
3744 /* Do it with several push insns if that doesn't take lots of insns
3745 and if there is no difficulty with push insns that skip bytes
3746 on the stack for alignment purposes. */
3749 && GET_CODE (size) == CONST_INT
3751 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3752 /* Here we avoid the case of a structure whose weak alignment
3753 forces many pushes of a small amount of data,
3754 and such small pushes do rounding that causes trouble. */
3755 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
3756 || align >= BIGGEST_ALIGNMENT
3757 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3758 == (align / BITS_PER_UNIT)))
3759 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3761 /* Push padding now if padding above and stack grows down,
3762 or if padding below and stack grows up.
3763 But if space already allocated, this has already been done. */
3764 if (extra && args_addr == 0
3765 && where_pad != none && where_pad != stack_direction)
3766 anti_adjust_stack (GEN_INT (extra));
3768 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
3771 #endif /* PUSH_ROUNDING */
3775 /* Otherwise make space on the stack and copy the data
3776 to the address of that space. */
3778 /* Deduct words put into registers from the size we must copy. */
3781 if (GET_CODE (size) == CONST_INT)
3782 size = GEN_INT (INTVAL (size) - used);
3784 size = expand_binop (GET_MODE (size), sub_optab, size,
3785 GEN_INT (used), NULL_RTX, 0,
3789 /* Get the address of the stack space.
3790 In this case, we do not deal with EXTRA separately.
3791 A single stack adjust will do. */
3794 temp = push_block (size, extra, where_pad == downward);
3797 else if (GET_CODE (args_so_far) == CONST_INT)
3798 temp = memory_address (BLKmode,
3799 plus_constant (args_addr,
3800 skip + INTVAL (args_so_far)));
3802 temp = memory_address (BLKmode,
3803 plus_constant (gen_rtx_PLUS (Pmode,
3808 if (!ACCUMULATE_OUTGOING_ARGS)
3810 /* If the source is referenced relative to the stack pointer,
3811 copy it to another register to stabilize it. We do not need
3812 to do this if we know that we won't be changing sp. */
3814 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3815 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3816 temp = copy_to_reg (temp);
3819 target = gen_rtx_MEM (BLKmode, temp);
3823 set_mem_attributes (target, type, 1);
3824 /* Function incoming arguments may overlap with sibling call
3825 outgoing arguments and we cannot allow reordering of reads
3826 from function arguments with stores to outgoing arguments
3827 of sibling calls. */
3828 set_mem_alias_set (target, 0);
3831 /* ALIGN may well be better aligned than TYPE, e.g. due to
3832 PARM_BOUNDARY. Assume the caller isn't lying. */
3833 set_mem_align (target, align);
3835 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
3838 else if (partial > 0)
3840 /* Scalar partly in registers. */
3842 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3845 /* # words of start of argument
3846 that we must make space for but need not store. */
3847 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3848 int args_offset = INTVAL (args_so_far);
3851 /* Push padding now if padding above and stack grows down,
3852 or if padding below and stack grows up.
3853 But if space already allocated, this has already been done. */
3854 if (extra && args_addr == 0
3855 && where_pad != none && where_pad != stack_direction)
3856 anti_adjust_stack (GEN_INT (extra));
3858 /* If we make space by pushing it, we might as well push
3859 the real data. Otherwise, we can leave OFFSET nonzero
3860 and leave the space uninitialized. */
3864 /* Now NOT_STACK gets the number of words that we don't need to
3865 allocate on the stack. */
3866 not_stack = partial - offset;
3868 /* If the partial register-part of the arg counts in its stack size,
3869 skip the part of stack space corresponding to the registers.
3870 Otherwise, start copying to the beginning of the stack space,
3871 by setting SKIP to 0. */
3872 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3874 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3875 x = validize_mem (force_const_mem (mode, x));
3877 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3878 SUBREGs of such registers are not allowed. */
3879 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3880 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3881 x = copy_to_reg (x);
3883 /* Loop over all the words allocated on the stack for this arg. */
3884 /* We can do it by words, because any scalar bigger than a word
3885 has a size a multiple of a word. */
3886 #ifndef PUSH_ARGS_REVERSED
3887 for (i = not_stack; i < size; i++)
3889 for (i = size - 1; i >= not_stack; i--)
3891 if (i >= not_stack + offset)
3892 emit_push_insn (operand_subword_force (x, i, mode),
3893 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3895 GEN_INT (args_offset + ((i - not_stack + skip)
3897 reg_parm_stack_space, alignment_pad);
3902 rtx target = NULL_RTX;
3905 /* Push padding now if padding above and stack grows down,
3906 or if padding below and stack grows up.
3907 But if space already allocated, this has already been done. */
3908 if (extra && args_addr == 0
3909 && where_pad != none && where_pad != stack_direction)
3910 anti_adjust_stack (GEN_INT (extra));
3912 #ifdef PUSH_ROUNDING
3913 if (args_addr == 0 && PUSH_ARGS)
3914 emit_single_push_insn (mode, x, type);
3918 if (GET_CODE (args_so_far) == CONST_INT)
3920 = memory_address (mode,
3921 plus_constant (args_addr,
3922 INTVAL (args_so_far)));
3924 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3927 dest = gen_rtx_MEM (mode, addr);
3930 set_mem_attributes (dest, type, 1);
3931 /* Function incoming arguments may overlap with sibling call
3932 outgoing arguments and we cannot allow reordering of reads
3933 from function arguments with stores to outgoing arguments
3934 of sibling calls. */
3935 set_mem_alias_set (dest, 0);
3938 emit_move_insn (dest, x);
3942 /* If part should go in registers, copy that part
3943 into the appropriate registers. Do this now, at the end,
3944 since mem-to-mem copies above may do function calls. */
3945 if (partial > 0 && reg != 0)
3947 /* Handle calls that pass values in multiple non-contiguous locations.
3948 The Irix 6 ABI has examples of this. */
3949 if (GET_CODE (reg) == PARALLEL)
3950 emit_group_load (reg, x, -1); /* ??? size? */
3952 move_block_to_reg (REGNO (reg), x, partial, mode);
3955 if (extra && args_addr == 0 && where_pad == stack_direction)
3956 anti_adjust_stack (GEN_INT (extra));
3958 if (alignment_pad && args_addr == 0)
3959 anti_adjust_stack (alignment_pad);
3962 /* Return X if X can be used as a subtarget in a sequence of arithmetic operations. */
3970 /* Only registers can be subtargets. */
3971 || GET_CODE (x) != REG
3972 /* If the register is readonly, it can't be set more than once. */
3973 || RTX_UNCHANGING_P (x)
3974 /* Don't use hard regs to avoid extending their life. */
3975 || REGNO (x) < FIRST_PSEUDO_REGISTER
3976 /* Avoid subtargets inside loops,
3977 since they hide some invariant expressions. */
3978 || preserve_subexpressions_p ())
3982 /* Expand an assignment that stores the value of FROM into TO.
3983 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3984 (This may contain a QUEUED rtx;
3985 if the value is constant, this rtx is a constant.)
3986 Otherwise, the returned value is NULL_RTX.
3988 SUGGEST_REG is no longer actually used.
3989 It used to mean, copy the value through a register
3990 and return that register, if that is possible.
3991 We now use WANT_VALUE to decide whether to do this. */
3994 expand_assignment (to, from, want_value, suggest_reg)
3997 int suggest_reg ATTRIBUTE_UNUSED;
4002 /* Don't crash if the lhs of the assignment was erroneous. */
4004 if (TREE_CODE (to) == ERROR_MARK)
4006 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4007 return want_value ? result : NULL_RTX;
4010 /* Assignment of a structure component needs special treatment
4011 if the structure component's rtx is not simply a MEM.
4012 Assignment of an array element at a constant index, and assignment of
4013 an array element in an unaligned packed structure field, have the same problem. */
4016 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4017 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF)
4019 enum machine_mode mode1;
4020 HOST_WIDE_INT bitsize, bitpos;
4028 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
4029 &unsignedp, &volatilep);
4031 /* If we are going to use store_bit_field and extract_bit_field,
4032 make sure to_rtx will be safe for multiple use. */
4034 if (mode1 == VOIDmode && want_value)
4035 tem = stabilize_reference (tem);
4037 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4041 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4043 if (GET_CODE (to_rtx) != MEM)
4046 #ifdef POINTERS_EXTEND_UNSIGNED
4047 if (GET_MODE (offset_rtx) != Pmode)
4048 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4050 if (GET_MODE (offset_rtx) != ptr_mode)
4051 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4054 /* A constant address in TO_RTX can have VOIDmode; we must not try
4055 to call force_reg for that case. Avoid that case. */
4056 if (GET_CODE (to_rtx) == MEM
4057 && GET_MODE (to_rtx) == BLKmode
4058 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
4060 && (bitpos % bitsize) == 0
4061 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
4062 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
4064 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4068 to_rtx = offset_address (to_rtx, offset_rtx,
4069 highest_pow2_factor_for_type (TREE_TYPE (to),
4073 if (GET_CODE (to_rtx) == MEM)
4075 /* If the field is at offset zero, we could have been given the
4076 DECL_RTX of the parent struct. Don't munge it. */
4077 to_rtx = shallow_copy_rtx (to_rtx);
4079 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
4082 /* Deal with volatile and readonly fields. The former is only done
4083 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4084 if (volatilep && GET_CODE (to_rtx) == MEM)
4086 if (to_rtx == orig_to_rtx)
4087 to_rtx = copy_rtx (to_rtx);
4088 MEM_VOLATILE_P (to_rtx) = 1;
4091 if (TREE_CODE (to) == COMPONENT_REF
4092 && TREE_READONLY (TREE_OPERAND (to, 1)))
4094 if (to_rtx == orig_to_rtx)
4095 to_rtx = copy_rtx (to_rtx);
4096 RTX_UNCHANGING_P (to_rtx) = 1;
4099 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4101 if (to_rtx == orig_to_rtx)
4102 to_rtx = copy_rtx (to_rtx);
4103 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4106 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4108 /* Spurious cast for HPUX compiler. */
4109 ? ((enum machine_mode)
4110 TYPE_MODE (TREE_TYPE (to)))
4112 unsignedp, TREE_TYPE (tem), get_alias_set (to));
4114 preserve_temp_slots (result);
4118 /* If the value is meaningful, convert RESULT to the proper mode.
4119 Otherwise, return nothing. */
4120 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4121 TYPE_MODE (TREE_TYPE (from)),
4123 TREE_UNSIGNED (TREE_TYPE (to)))
4127 /* If the rhs is a function call and its value is not an aggregate,
4128 call the function before we start to compute the lhs.
4129 This is needed for correct code for cases such as
4130 val = setjmp (buf) on machines where reference to val
4131 requires loading up part of an address in a separate insn.
4133 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4134 since it might be a promoted variable where the zero- or sign- extension
4135 needs to be done. Handling this in the normal way is safe because no
4136 computation is done before the call. */
4137 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
4138 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4139 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4140 && GET_CODE (DECL_RTL (to)) == REG))
4145 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
4147 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4149 /* Handle calls that return values in multiple non-contiguous locations.
4150 The Irix 6 ABI has examples of this. */
4151 if (GET_CODE (to_rtx) == PARALLEL)
4152 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
4153 else if (GET_MODE (to_rtx) == BLKmode)
4154 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
4157 #ifdef POINTERS_EXTEND_UNSIGNED
4158 if (POINTER_TYPE_P (TREE_TYPE (to))
4159 && GET_MODE (to_rtx) != GET_MODE (value))
4160 value = convert_memory_address (GET_MODE (to_rtx), value);
4162 emit_move_insn (to_rtx, value);
4164 preserve_temp_slots (to_rtx);
4167 return want_value ? to_rtx : NULL_RTX;
4170 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4171 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4174 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
4176 /* Don't move directly into a return register. */
4177 if (TREE_CODE (to) == RESULT_DECL
4178 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
4183 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4185 if (GET_CODE (to_rtx) == PARALLEL)
4186 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4188 emit_move_insn (to_rtx, temp);
4190 preserve_temp_slots (to_rtx);
4193 return want_value ? to_rtx : NULL_RTX;
4196 /* In case we are returning the contents of an object which overlaps
4197 the place the value is being stored, use a safe function when copying
4198 a value through a pointer into a structure value return block. */
4199 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4200 && current_function_returns_struct
4201 && !current_function_returns_pcc_struct)
4206 size = expr_size (from);
4207 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
4209 if (TARGET_MEM_FUNCTIONS)
4210 emit_library_call (memmove_libfunc, LCT_NORMAL,
4211 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4212 XEXP (from_rtx, 0), Pmode,
4213 convert_to_mode (TYPE_MODE (sizetype),
4214 size, TREE_UNSIGNED (sizetype)),
4215 TYPE_MODE (sizetype));
4217 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4218 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4219 XEXP (to_rtx, 0), Pmode,
4220 convert_to_mode (TYPE_MODE (integer_type_node),
4222 TREE_UNSIGNED (integer_type_node)),
4223 TYPE_MODE (integer_type_node));
4225 preserve_temp_slots (to_rtx);
4228 return want_value ? to_rtx : NULL_RTX;
4231 /* Compute FROM and store the value in the rtx we got. */
4234 result = store_expr (from, to_rtx, want_value);
4235 preserve_temp_slots (result);
4238 return want_value ? result : NULL_RTX;
4241 /* Generate code for computing expression EXP,
4242 and storing the value into TARGET.
4243 TARGET may contain a QUEUED rtx.
4245 If WANT_VALUE is nonzero, return a copy of the value
4246 not in TARGET, so that we can be sure to use the proper
4247 value in a containing expression even if TARGET has something
4248 else stored in it. If possible, we copy the value through a pseudo
4249 and return that pseudo. Or, if the value is constant, we try to
4250 return the constant. In some cases, we return a pseudo
4251 copied *from* TARGET.
4253 If the mode is BLKmode then we may return TARGET itself.
4254 It turns out that in BLKmode it doesn't cause a problem,
4255 because C has no operators that could combine two different
4256 assignments into the same BLKmode object with different values
4257 with no sequence point. Will other languages need this to be more thorough?
4260 If WANT_VALUE is 0, we return NULL, to make sure
4261 to catch quickly any cases where the caller uses the value
4262 and fails to set WANT_VALUE. */
4265 store_expr (exp, target, want_value)
4271 int dont_return_target = 0;
4272 int dont_store_target = 0;
4274 if (TREE_CODE (exp) == COMPOUND_EXPR)
4276 /* Perform first part of compound expression, then assign from second part. */
4278 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4280 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4282 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4284 /* For conditional expression, get safe form of the target. Then
4285 test the condition, doing the appropriate assignment on either
4286 side. This avoids the creation of unnecessary temporaries.
4287 For non-BLKmode, it is more efficient not to do this. */
4289 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4292 target = protect_from_queue (target, 1);
4294 do_pending_stack_adjust ();
4296 jumpifnot (TREE_OPERAND (exp, 0), lab1);
4297 start_cleanup_deferral ();
4298 store_expr (TREE_OPERAND (exp, 1), target, 0);
4299 end_cleanup_deferral ();
4301 emit_jump_insn (gen_jump (lab2));
4304 start_cleanup_deferral ();
4305 store_expr (TREE_OPERAND (exp, 2), target, 0);
4306 end_cleanup_deferral ();
4311 return want_value ? target : NULL_RTX;
4313 else if (queued_subexp_p (target))
4314 /* If target contains a postincrement, let's not risk
4315 using it as the place to generate the rhs. */
4317 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4319 /* Expand EXP into a new pseudo. */
4320 temp = gen_reg_rtx (GET_MODE (target));
4321 temp = expand_expr (exp, temp, GET_MODE (target), 0);
4324 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
4326 /* If target is volatile, ANSI requires accessing the value
4327 *from* the target, if it is accessed. So make that happen.
4328 In no case return the target itself. */
4329 if (! MEM_VOLATILE_P (target) && want_value)
4330 dont_return_target = 1;
4332 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
4333 && GET_MODE (target) != BLKmode)
4334 /* If target is in memory and caller wants value in a register instead,
4335 arrange that. Pass TARGET as target for expand_expr so that,
4336 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4337 We know expand_expr will not use the target in that case.
4338 Don't do this if TARGET is volatile because we are supposed
4339 to write it and then read it. */
4341 temp = expand_expr (exp, target, GET_MODE (target), 0);
4342 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4344 /* If TEMP is already in the desired TARGET, only copy it from
4345 memory and don't store it there again. */
4347 || (rtx_equal_p (temp, target)
4348 && ! side_effects_p (temp) && ! side_effects_p (target)))
4349 dont_store_target = 1;
4350 temp = copy_to_reg (temp);
4352 dont_return_target = 1;
4354 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
4355 /* If this is a scalar in a register that is stored in a wider mode
4356 than the declared mode, compute the result into its declared mode
4357 and then convert to the wider mode. Our value is the computed
4358 expression. */
4360 rtx inner_target = 0;
4362 /* If we don't want a value, we can do the conversion inside EXP,
4363 which will often result in some optimizations. Do the conversion
4364 in two steps: first change the signedness, if needed, then
4365 the extend. But don't do this if the type of EXP is a subtype
4366 of something else since then the conversion might involve
4367 more than just converting modes. */
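/* Added note -- an illustrative source-level view of the two-step
   conversion described above, assuming a signed char value assigned
   to an object promoted to a wider unsigned register:

	signed char c = -1;
	unsigned char u = (unsigned char) c;	-- signedness first
	unsigned int w = (unsigned int) u;	-- then the widening

   rather than as a single combined cast.  */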
4368 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
4369 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4371 if (TREE_UNSIGNED (TREE_TYPE (exp))
4372 != SUBREG_PROMOTED_UNSIGNED_P (target))
4374 ((*lang_hooks.types.signed_or_unsigned_type)
4375 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
4377 exp = convert ((*lang_hooks.types.type_for_mode)
4378 (GET_MODE (SUBREG_REG (target)),
4379 SUBREG_PROMOTED_UNSIGNED_P (target)),
4382 inner_target = SUBREG_REG (target);
4385 temp = expand_expr (exp, inner_target, VOIDmode, 0);
4387 /* If TEMP is a volatile MEM and we want a result value, make
4388 the access now so it gets done only once. Likewise if
4389 it contains TARGET. */
4390 if (GET_CODE (temp) == MEM && want_value
4391 && (MEM_VOLATILE_P (temp)
4392 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
4393 temp = copy_to_reg (temp);
4395 /* If TEMP is a VOIDmode constant, use convert_modes to make
4396 sure that we properly convert it. */
4397 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4399 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4400 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4401 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4402 GET_MODE (target), temp,
4403 SUBREG_PROMOTED_UNSIGNED_P (target));
4406 convert_move (SUBREG_REG (target), temp,
4407 SUBREG_PROMOTED_UNSIGNED_P (target));
4409 /* If we promoted a constant, change the mode back down to match
4410 target. Otherwise, the caller might get confused by a result whose
4411 mode is larger than expected. */
4413 if (want_value && GET_MODE (temp) != GET_MODE (target))
4415 if (GET_MODE (temp) != VOIDmode)
4417 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4418 SUBREG_PROMOTED_VAR_P (temp) = 1;
4419 SUBREG_PROMOTED_UNSIGNED_SET (temp,
4420 SUBREG_PROMOTED_UNSIGNED_P (target));
4423 temp = convert_modes (GET_MODE (target),
4424 GET_MODE (SUBREG_REG (target)),
4425 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4428 return want_value ? temp : NULL_RTX;
4432 temp = expand_expr (exp, target, GET_MODE (target), 0);
4433 /* Return TARGET if it's a specified hardware register.
4434 If TARGET is a volatile mem ref, either return TARGET
4435 or return a reg copied *from* TARGET; ANSI requires this.
4437 Otherwise, if TEMP is not TARGET, return TEMP
4438 if it is constant (for efficiency),
4439 or if we really want the correct value. */
4440 if (!(target && GET_CODE (target) == REG
4441 && REGNO (target) < FIRST_PSEUDO_REGISTER)
4442 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
4443 && ! rtx_equal_p (temp, target)
4444 && (CONSTANT_P (temp) || want_value))
4445 dont_return_target = 1;
4448 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4449 the same as that of TARGET, adjust the constant. This is needed, for
4450 example, in case it is a CONST_DOUBLE and we want only a word-sized
4451 value. */
4452 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
4453 && TREE_CODE (exp) != ERROR_MARK
4454 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4455 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4456 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4458 /* If value was not generated in the target, store it there.
4459 Convert the value to TARGET's type first if necessary.
4460 If TEMP and TARGET compare equal according to rtx_equal_p, but
4461 one or both of them are volatile memory refs, we have to distinguish
4462 two cases:
4463 - expand_expr has used TARGET. In this case, we must not generate
4464 another copy. This can be detected by TARGET being equal according
4465 to == .
4466 - expand_expr has not used TARGET - that means that the source just
4467 happens to have the same RTX form. Since temp will have been created
4468 by expand_expr, it will compare unequal according to == .
4469 We must generate a copy in this case, to reach the correct number
4470 of volatile memory references. */
4472 if ((! rtx_equal_p (temp, target)
4473 || (temp != target && (side_effects_p (temp)
4474 || side_effects_p (target))))
4475 && TREE_CODE (exp) != ERROR_MARK
4476 && ! dont_store_target
4477 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4478 but TARGET is not a valid memory reference, TEMP will differ
4479 from TARGET although it is really the same location. */
4480 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4481 || target != DECL_RTL_IF_SET (exp))
4482 /* If there's nothing to copy, don't bother. Don't call expr_size
4483 unless necessary, because the expr_size hook of some front ends (C++)
4484 aborts on objects that are not supposed to be bit-copied or
4485 stored in memory. */
4486 && expr_size (exp) != const0_rtx)
4488 target = protect_from_queue (target, 1);
4489 if (GET_MODE (temp) != GET_MODE (target)
4490 && GET_MODE (temp) != VOIDmode)
4492 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4493 if (dont_return_target)
4495 /* In this case, we will return TEMP,
4496 so make sure it has the proper mode.
4497 But don't forget to store the value into TARGET. */
4498 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4499 emit_move_insn (target, temp);
4502 convert_move (target, temp, unsignedp);
4505 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4507 /* Handle copying a string constant into an array. The string
4508 constant may be shorter than the array. So copy just the string's
4509 actual length, and clear the rest. First get the size of the data
4510 type of the string, which is actually the size of the target. */
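/* Added note -- illustrative example (not from the original source):
   for
	char buf[8] = "abc";
   SIZE is 8 while TREE_STRING_LENGTH is 4 ("abc" plus the trailing
   NUL), so the code below copies 4 bytes and clears the other 4.  */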
4511 rtx size = expr_size (exp);
4513 if (GET_CODE (size) == CONST_INT
4514 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4515 emit_block_move (target, temp, size, BLOCK_OP_NORMAL);
4518 /* Compute the size of the data to copy from the string. */
4519 tree copy_size
4520 = size_binop (MIN_EXPR,
4521 make_tree (sizetype, size),
4522 size_int (TREE_STRING_LENGTH (exp)));
4523 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4527 /* Copy that much. */
4528 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
4529 emit_block_move (target, temp, copy_size_rtx, BLOCK_OP_NORMAL);
4531 /* Figure out how much is left in TARGET that we have to clear.
4532 Do all calculations in ptr_mode. */
4533 if (GET_CODE (copy_size_rtx) == CONST_INT)
4535 size = plus_constant (size, -INTVAL (copy_size_rtx));
4536 target = adjust_address (target, BLKmode,
4537 INTVAL (copy_size_rtx));
4541 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
4542 copy_size_rtx, NULL_RTX, 0,
4545 #ifdef POINTERS_EXTEND_UNSIGNED
4546 if (GET_MODE (copy_size_rtx) != Pmode)
4547 copy_size_rtx = convert_memory_address (Pmode,
4551 target = offset_address (target, copy_size_rtx,
4552 highest_pow2_factor (copy_size));
4553 label = gen_label_rtx ();
4554 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4555 GET_MODE (size), 0, label);
4558 if (size != const0_rtx)
4559 clear_storage (target, size);
4565 /* Handle calls that return values in multiple non-contiguous locations.
4566 The Irix 6 ABI has examples of this. */
4567 else if (GET_CODE (target) == PARALLEL)
4568 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
4569 else if (GET_MODE (temp) == BLKmode)
4570 emit_block_move (target, temp, expr_size (exp), BLOCK_OP_NORMAL);
4572 emit_move_insn (target, temp);
4575 /* If we don't want a value, return NULL_RTX. */
4579 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4580 ??? The latter test doesn't seem to make sense. */
4581 else if (dont_return_target && GET_CODE (temp) != MEM)
4584 /* Return TARGET itself if it is a hard register. */
4585 else if (want_value && GET_MODE (target) != BLKmode
4586 && ! (GET_CODE (target) == REG
4587 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4588 return copy_to_reg (target);
4594 /* Return 1 if EXP just contains zeros. */
4602 switch (TREE_CODE (exp))
4606 case NON_LVALUE_EXPR:
4607 case VIEW_CONVERT_EXPR:
4608 return is_zeros_p (TREE_OPERAND (exp, 0));
4611 return integer_zerop (exp);
4615 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4618 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
4621 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4622 elt = TREE_CHAIN (elt))
4623 if (!is_zeros_p (TREE_VALUE (elt)))
4629 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4630 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
4631 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4632 if (! is_zeros_p (TREE_VALUE (elt)))
4642 /* Return 1 if EXP contains mostly (3/4) zeros. */
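/* Added note: the division-free test "4 * zeros >= 3 * elts" used by
   this predicate is equivalent to zeros / elts >= 3/4.  Illustrative
   case: an initializer with 3 zero elements out of 4 gives
   4*3 >= 3*4, so it counts as mostly zero and favors a bulk clear.  */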
4645 mostly_zeros_p (exp)
4648 if (TREE_CODE (exp) == CONSTRUCTOR)
4650 int elts = 0, zeros = 0;
4651 tree elt = CONSTRUCTOR_ELTS (exp);
4652 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4654 /* If there are no ranges of true bits, it is all zero. */
4655 return elt == NULL_TREE;
4657 for (; elt; elt = TREE_CHAIN (elt))
4659 /* We do not handle the case where the index is a RANGE_EXPR,
4660 so the statistic will be somewhat inaccurate.
4661 We do make a more accurate count in store_constructor itself,
4662 and since this function is only used for nested array elements,
4663 this should be close enough. */
4664 if (mostly_zeros_p (TREE_VALUE (elt)))
4669 return 4 * zeros >= 3 * elts;
4672 return is_zeros_p (exp);
4675 /* Helper function for store_constructor.
4676 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4677 TYPE is the type of the CONSTRUCTOR, not the element type.
4678 CLEARED is as for store_constructor.
4679 ALIAS_SET is the alias set to use for any stores.
4681 This provides a recursive shortcut back to store_constructor when it isn't
4682 necessary to go through store_field. This is so that we can pass through
4683 the cleared field to let store_constructor know that we may not have to
4684 clear a substructure if the outer structure has already been cleared. */
4687 store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4690 unsigned HOST_WIDE_INT bitsize;
4691 HOST_WIDE_INT bitpos;
4692 enum machine_mode mode;
4697 if (TREE_CODE (exp) == CONSTRUCTOR
4698 && bitpos % BITS_PER_UNIT == 0
4699 /* If we have a nonzero bitpos for a register target, then we just
4700 let store_field do the bitfield handling. This is unlikely to
4701 generate unnecessary clear instructions anyway. */
4702 && (bitpos == 0 || GET_CODE (target) == MEM))
4704 if (GET_CODE (target) == MEM)
4706 = adjust_address (target,
4707 GET_MODE (target) == BLKmode
4709 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4710 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
4713 /* Update the alias set, if required. */
4714 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4715 && MEM_ALIAS_SET (target) != 0)
4717 target = copy_rtx (target);
4718 set_mem_alias_set (target, alias_set);
4721 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
4724 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4728 /* Store the value of constructor EXP into the rtx TARGET.
4729 TARGET is either a REG or a MEM; we know it cannot conflict, since
4730 safe_from_p has been called.
4731 CLEARED is true if TARGET is known to have been zero'd.
4732 SIZE is the number of bytes of TARGET we are allowed to modify: this
4733 may not be the same as the size of EXP if we are assigning to a field
4734 which has been packed to exclude padding bits. */
4737 store_constructor (exp, target, cleared, size)
4743 tree type = TREE_TYPE (exp);
4744 #ifdef WORD_REGISTER_OPERATIONS
4745 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
4748 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4749 || TREE_CODE (type) == QUAL_UNION_TYPE)
4753 /* We either clear the aggregate or indicate the value is dead. */
4754 if ((TREE_CODE (type) == UNION_TYPE
4755 || TREE_CODE (type) == QUAL_UNION_TYPE)
4757 && ! CONSTRUCTOR_ELTS (exp))
4758 /* If the constructor is empty, clear the union. */
4760 clear_storage (target, expr_size (exp));
4764 /* If we are building a static constructor into a register,
4765 set the initial value as zero so we can fold the value into
4766 a constant. But if more than one register is involved,
4767 this probably loses. */
4768 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
4769 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4771 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4775 /* If the constructor has fewer fields than the structure
4776 or if we are initializing the structure to mostly zeros,
4777 clear the whole structure first. Don't do this if TARGET is a
4778 register whose mode size isn't equal to SIZE since clear_storage
4779 can't handle this case. */
4780 else if (! cleared && size > 0
4781 && ((list_length (CONSTRUCTOR_ELTS (exp))
4782 != fields_length (type))
4783 || mostly_zeros_p (exp))
4784 && (GET_CODE (target) != REG
4785 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4788 clear_storage (target, GEN_INT (size));
4793 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4795 /* Store each element of the constructor into
4796 the corresponding field of TARGET. */
4798 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4800 tree field = TREE_PURPOSE (elt);
4801 tree value = TREE_VALUE (elt);
4802 enum machine_mode mode;
4803 HOST_WIDE_INT bitsize;
4804 HOST_WIDE_INT bitpos = 0;
4807 rtx to_rtx = target;
4809 /* Just ignore missing fields.
4810 We cleared the whole structure, above,
4811 if any fields are missing. */
4815 if (cleared && is_zeros_p (value))
4818 if (host_integerp (DECL_SIZE (field), 1))
4819 bitsize = tree_low_cst (DECL_SIZE (field), 1);
4823 unsignedp = TREE_UNSIGNED (field);
4824 mode = DECL_MODE (field);
4825 if (DECL_BIT_FIELD (field))
4828 offset = DECL_FIELD_OFFSET (field);
4829 if (host_integerp (offset, 0)
4830 && host_integerp (bit_position (field), 0))
4832 bitpos = int_bit_position (field);
4836 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4842 if (contains_placeholder_p (offset))
4843 offset = build (WITH_RECORD_EXPR, sizetype,
4844 offset, make_tree (TREE_TYPE (exp), target));
4846 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4847 if (GET_CODE (to_rtx) != MEM)
4850 #ifdef POINTERS_EXTEND_UNSIGNED
4851 if (GET_MODE (offset_rtx) != Pmode)
4852 offset_rtx = convert_memory_address (Pmode, offset_rtx);
4854 if (GET_MODE (offset_rtx) != ptr_mode)
4855 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4858 to_rtx = offset_address (to_rtx, offset_rtx,
4859 highest_pow2_factor (offset));
4862 if (TREE_READONLY (field))
4864 if (GET_CODE (to_rtx) == MEM)
4865 to_rtx = copy_rtx (to_rtx);
4867 RTX_UNCHANGING_P (to_rtx) = 1;
4870 #ifdef WORD_REGISTER_OPERATIONS
4871 /* If this initializes a field that is smaller than a word, at the
4872 start of a word, try to widen it to a full word.
4873 This special case allows us to output C++ member function
4874 initializations in a form that the optimizers can understand. */
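/* Added note -- illustrative numbers (assumed, not from the original
   source): with BITS_PER_WORD == 32, an 8-bit integer field at
   bitpos 0 holding 0x12 is widened below to a full-word store; on a
   big-endian target the value is first shifted left by 32 - 8 == 24
   bits so the byte lands in the field's position within the word.  */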
4875 if (GET_CODE (target) == REG
4876 && bitsize < BITS_PER_WORD
4877 && bitpos % BITS_PER_WORD == 0
4878 && GET_MODE_CLASS (mode) == MODE_INT
4879 && TREE_CODE (value) == INTEGER_CST
4881 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
4883 tree type = TREE_TYPE (value);
4885 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4887 type = (*lang_hooks.types.type_for_size)
4888 (BITS_PER_WORD, TREE_UNSIGNED (type));
4889 value = convert (type, value);
4892 if (BYTES_BIG_ENDIAN)
4894 = fold (build (LSHIFT_EXPR, type, value,
4895 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4896 bitsize = BITS_PER_WORD;
4901 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
4902 && DECL_NONADDRESSABLE_P (field))
4904 to_rtx = copy_rtx (to_rtx);
4905 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4908 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4909 value, type, cleared,
4910 get_alias_set (TREE_TYPE (field)));
4913 else if (TREE_CODE (type) == ARRAY_TYPE
4914 || TREE_CODE (type) == VECTOR_TYPE)
4919 tree domain = TYPE_DOMAIN (type);
4920 tree elttype = TREE_TYPE (type);
4922 HOST_WIDE_INT minelt = 0;
4923 HOST_WIDE_INT maxelt = 0;
4925 /* Vectors are like arrays, but the domain is stored via an array
4926 type indirectly. */
4927 if (TREE_CODE (type) == VECTOR_TYPE)
4929 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
4930 the same field as TYPE_DOMAIN, we are not guaranteed that
4931 it always will. */
4932 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
4933 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
4936 const_bounds_p = (TYPE_MIN_VALUE (domain)
4937 && TYPE_MAX_VALUE (domain)
4938 && host_integerp (TYPE_MIN_VALUE (domain), 0)
4939 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4941 /* If we have constant bounds for the range of the type, get them. */
4944 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4945 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4948 /* If the constructor has fewer elements than the array,
4949 clear the whole array first. Similarly if this is a
4950 static constructor of a non-BLKmode object. */
4951 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4955 HOST_WIDE_INT count = 0, zero_count = 0;
4956 need_to_clear = ! const_bounds_p;
4958 /* This loop is a more accurate version of the loop in
4959 mostly_zeros_p (it handles RANGE_EXPR in an index).
4960 It is also needed to check for missing elements. */
4961 for (elt = CONSTRUCTOR_ELTS (exp);
4962 elt != NULL_TREE && ! need_to_clear;
4963 elt = TREE_CHAIN (elt))
4965 tree index = TREE_PURPOSE (elt);
4966 HOST_WIDE_INT this_node_count;
4968 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4970 tree lo_index = TREE_OPERAND (index, 0);
4971 tree hi_index = TREE_OPERAND (index, 1);
4973 if (! host_integerp (lo_index, 1)
4974 || ! host_integerp (hi_index, 1))
4980 this_node_count = (tree_low_cst (hi_index, 1)
4981 - tree_low_cst (lo_index, 1) + 1);
4984 this_node_count = 1;
4986 count += this_node_count;
4987 if (mostly_zeros_p (TREE_VALUE (elt)))
4988 zero_count += this_node_count;
4991 /* Clear the entire array first if there are any missing elements,
4992 or if the incidence of zero elements is >= 75%. */
4994 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
4998 if (need_to_clear && size > 0)
5003 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5005 clear_storage (target, GEN_INT (size));
5009 else if (REG_P (target))
5010 /* Inform later passes that the old value is dead. */
5011 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
5013 /* Store each element of the constructor into
5014 the corresponding element of TARGET, determined
5015 by counting the elements. */
5016 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5018 elt = TREE_CHAIN (elt), i++)
5020 enum machine_mode mode;
5021 HOST_WIDE_INT bitsize;
5022 HOST_WIDE_INT bitpos;
5024 tree value = TREE_VALUE (elt);
5025 tree index = TREE_PURPOSE (elt);
5026 rtx xtarget = target;
5028 if (cleared && is_zeros_p (value))
5031 unsignedp = TREE_UNSIGNED (elttype);
5032 mode = TYPE_MODE (elttype);
5033 if (mode == BLKmode)
5034 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5035 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5038 bitsize = GET_MODE_BITSIZE (mode);
5040 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5042 tree lo_index = TREE_OPERAND (index, 0);
5043 tree hi_index = TREE_OPERAND (index, 1);
5044 rtx index_r, pos_rtx, hi_r, loop_top, loop_end;
5045 struct nesting *loop;
5046 HOST_WIDE_INT lo, hi, count;
5049 /* If the range is constant and "small", unroll the loop. */
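/* Added note: a RANGE_EXPR index typically comes from a GNU C
   designated range initializer, e.g. (illustrative)

	int a[16] = { [2 ... 5] = 7 };

   where lo_index is 2, hi_index is 5 and count is 4, small enough
   for the unrolled path below.  */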
5051 && host_integerp (lo_index, 0)
5052 && host_integerp (hi_index, 0)
5053 && (lo = tree_low_cst (lo_index, 0),
5054 hi = tree_low_cst (hi_index, 0),
5055 count = hi - lo + 1,
5056 (GET_CODE (target) != MEM
5058 || (host_integerp (TYPE_SIZE (elttype), 1)
5059 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5062 lo -= minelt; hi -= minelt;
5063 for (; lo <= hi; lo++)
5065 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
5067 if (GET_CODE (target) == MEM
5068 && !MEM_KEEP_ALIAS_SET_P (target)
5069 && TREE_CODE (type) == ARRAY_TYPE
5070 && TYPE_NONALIASED_COMPONENT (type))
5072 target = copy_rtx (target);
5073 MEM_KEEP_ALIAS_SET_P (target) = 1;
5076 store_constructor_field
5077 (target, bitsize, bitpos, mode, value, type, cleared,
5078 get_alias_set (elttype));
5083 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
5084 loop_top = gen_label_rtx ();
5085 loop_end = gen_label_rtx ();
5087 unsignedp = TREE_UNSIGNED (domain);
5089 index = build_decl (VAR_DECL, NULL_TREE, domain);
5092 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5094 SET_DECL_RTL (index, index_r);
5095 if (TREE_CODE (value) == SAVE_EXPR
5096 && SAVE_EXPR_RTL (value) == 0)
5098 /* Make sure value gets expanded once before the
5099 loop. */
5100 expand_expr (value, const0_rtx, VOIDmode, 0);
5103 store_expr (lo_index, index_r, 0);
5104 loop = expand_start_loop (0);
5106 /* Assign value to element index. */
5107 position
5108 = convert (ssizetype,
5109 fold (build (MINUS_EXPR, TREE_TYPE (index),
5110 index, TYPE_MIN_VALUE (domain))));
5111 position = size_binop (MULT_EXPR, position,
5113 TYPE_SIZE_UNIT (elttype)));
5115 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
5116 xtarget = offset_address (target, pos_rtx,
5117 highest_pow2_factor (position));
5118 xtarget = adjust_address (xtarget, mode, 0);
5119 if (TREE_CODE (value) == CONSTRUCTOR)
5120 store_constructor (value, xtarget, cleared,
5121 bitsize / BITS_PER_UNIT);
5123 store_expr (value, xtarget, 0);
5125 expand_exit_loop_if_false (loop,
5126 build (LT_EXPR, integer_type_node,
5129 expand_increment (build (PREINCREMENT_EXPR,
5131 index, integer_one_node), 0, 0);
5133 emit_label (loop_end);
5136 else if ((index != 0 && ! host_integerp (index, 0))
5137 || ! host_integerp (TYPE_SIZE (elttype), 1))
5142 index = ssize_int (1);
5145 index = convert (ssizetype,
5146 fold (build (MINUS_EXPR, index,
5147 TYPE_MIN_VALUE (domain))));
5149 position = size_binop (MULT_EXPR, index,
5151 TYPE_SIZE_UNIT (elttype)));
5152 xtarget = offset_address (target,
5153 expand_expr (position, 0, VOIDmode, 0),
5154 highest_pow2_factor (position));
5155 xtarget = adjust_address (xtarget, mode, 0);
5156 store_expr (value, xtarget, 0);
5161 bitpos = ((tree_low_cst (index, 0) - minelt)
5162 * tree_low_cst (TYPE_SIZE (elttype), 1));
5164 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5166 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
5167 && TREE_CODE (type) == ARRAY_TYPE
5168 && TYPE_NONALIASED_COMPONENT (type))
5170 target = copy_rtx (target);
5171 MEM_KEEP_ALIAS_SET_P (target) = 1;
5174 store_constructor_field (target, bitsize, bitpos, mode, value,
5175 type, cleared, get_alias_set (elttype));
5181 /* Set constructor assignments. */
5182 else if (TREE_CODE (type) == SET_TYPE)
5184 tree elt = CONSTRUCTOR_ELTS (exp);
5185 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
5186 tree domain = TYPE_DOMAIN (type);
5187 tree domain_min, domain_max, bitlength;
5189 /* The default implementation strategy is to extract the constant
5190 parts of the constructor, use that to initialize the target,
5191 and then "or" in whatever non-constant ranges we need in addition.
5193 If a large set is all zero or all ones, it is
5194 probably better to set it using memset (if available) or bzero.
5195 Also, if a large set has just a single range, it may be
5196 better to first clear the whole set (using
5197 bzero/memset) and then set the bits we want. */
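/* Added sketch (illustrative only) of the constant-part packing done
   below: bits returned by get_set_constructor_bits are accumulated
   into WORD one at a time; whenever a word fills up, or the bits run
   out, the word is flushed with emit_move_insn and OFFSET advances
   by set_word_size / BITS_PER_UNIT bytes.  */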
5199 /* Check for all zeros. */
5200 if (elt == NULL_TREE && size > 0)
5203 clear_storage (target, GEN_INT (size));
5207 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5208 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5209 bitlength = size_binop (PLUS_EXPR,
5210 size_diffop (domain_max, domain_min),
5213 nbits = tree_low_cst (bitlength, 1);
5215 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5216 are "complicated" (more than one range), initialize (the
5217 constant parts) by copying from a constant. */
5218 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5219 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
5221 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
5222 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
5223 char *bit_buffer = (char *) alloca (nbits);
5224 HOST_WIDE_INT word = 0;
5225 unsigned int bit_pos = 0;
5226 unsigned int ibit = 0;
5227 unsigned int offset = 0; /* In bytes from beginning of set. */
5229 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
5232 if (bit_buffer[ibit])
5234 if (BYTES_BIG_ENDIAN)
5235 word |= (1 << (set_word_size - 1 - bit_pos));
5237 word |= 1 << bit_pos;
5241 if (bit_pos >= set_word_size || ibit == nbits)
5243 if (word != 0 || ! cleared)
5245 rtx datum = GEN_INT (word);
5248 /* The assumption here is that it is safe to use
5249 XEXP if the set is multi-word, but not if
5250 it's single-word. */
5251 if (GET_CODE (target) == MEM)
5252 to_rtx = adjust_address (target, mode, offset);
5253 else if (offset == 0)
5257 emit_move_insn (to_rtx, datum);
5264 offset += set_word_size / BITS_PER_UNIT;
5269 /* Don't bother clearing storage if the set is all ones. */
5270 if (TREE_CHAIN (elt) != NULL_TREE
5271 || (TREE_PURPOSE (elt) == NULL_TREE
5272 ? nbits != 1
5273 : ( ! host_integerp (TREE_VALUE (elt), 0)
5274 || ! host_integerp (TREE_PURPOSE (elt), 0)
5275 || (tree_low_cst (TREE_VALUE (elt), 0)
5276 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5277 != (HOST_WIDE_INT) nbits))))
5278 clear_storage (target, expr_size (exp));
5280 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
5282 /* Start of range of element or NULL. */
5283 tree startbit = TREE_PURPOSE (elt);
5284 /* End of range of element, or element value. */
5285 tree endbit = TREE_VALUE (elt);
5286 HOST_WIDE_INT startb, endb;
5287 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
5289 bitlength_rtx = expand_expr (bitlength,
5290 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
5292 /* Handle a non-range tuple element like [ expr ]. */
5293 if (startbit == NULL_TREE)
5295 startbit = save_expr (endbit);
5299 startbit = convert (sizetype, startbit);
5300 endbit = convert (sizetype, endbit);
5301 if (! integer_zerop (domain_min))
5303 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5304 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5306 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
5307 EXPAND_CONST_ADDRESS);
5308 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
5309 EXPAND_CONST_ADDRESS);
5315 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5316 (GET_MODE (target), 0),
5319 emit_move_insn (targetx, target);
5322 else if (GET_CODE (target) == MEM)
5327 /* Optimization: If startbit and endbit are constants divisible
5328 by BITS_PER_UNIT, call memset instead. */
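/* Added note -- illustrative arithmetic (not from the original
   source): with BITS_PER_UNIT == 8, a range covering bits 8..23
   gives startb == 8 and endb == 24, so the call below amounts to
   memset (<target address> + 1, -1, 2), i.e. two whole bytes of
   ones.  */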
5329 if (TARGET_MEM_FUNCTIONS
5330 && TREE_CODE (startbit) == INTEGER_CST
5331 && TREE_CODE (endbit) == INTEGER_CST
5332 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
5333 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
5335 emit_library_call (memset_libfunc, LCT_NORMAL,
5337 plus_constant (XEXP (targetx, 0),
5338 startb / BITS_PER_UNIT),
5340 constm1_rtx, TYPE_MODE (integer_type_node),
5341 GEN_INT ((endb - startb) / BITS_PER_UNIT),
5342 TYPE_MODE (sizetype));
5345 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
5346 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5347 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
5348 startbit_rtx, TYPE_MODE (sizetype),
5349 endbit_rtx, TYPE_MODE (sizetype));
5352 emit_move_insn (target, targetx);
5360 /* Store the value of EXP (an expression tree)
5361 into a subfield of TARGET which has mode MODE and occupies
5362 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5363 If MODE is VOIDmode, it means that we are storing into a bit-field.
5365 If VALUE_MODE is VOIDmode, return nothing in particular.
5366 UNSIGNEDP is not used in this case.
5368 Otherwise, return an rtx for the value stored. This rtx
5369 has mode VALUE_MODE if that is convenient to do.
5370 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5372 TYPE is the type of the underlying object.
5374 ALIAS_SET is the alias set for the destination. This value will
5375 (in general) be different from that for TARGET, since TARGET is a
5376 reference to the containing structure. */
5379 store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5382 HOST_WIDE_INT bitsize;
5383 HOST_WIDE_INT bitpos;
5384 enum machine_mode mode;
5386 enum machine_mode value_mode;
5391 HOST_WIDE_INT width_mask = 0;
5393 if (TREE_CODE (exp) == ERROR_MARK)
5396 /* If we have nothing to store, do nothing unless the expression has
5397 side effects. */
5398 if (bitsize == 0)
5399 return expand_expr (exp, const0_rtx, VOIDmode, 0);
5400 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
5401 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
5403 /* If we are storing into an unaligned field of an aligned union that is
5404 in a register, we may have the mode of TARGET being an integer mode but
5405 MODE == BLKmode. In that case, get an aligned object whose size and
5406 alignment are the same as TARGET and store TARGET into it (we can avoid
5407 the store if the field being stored is the entire width of TARGET). Then
5408 call ourselves recursively to store the field into a BLKmode version of
5409 that object. Finally, load from the object into TARGET. This is not
5410 very efficient in general, but should only be slightly more expensive
5411 than the otherwise-required unaligned accesses. Perhaps this can be
5412 cleaned up later. */
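/* Added sketch of the sequence described above (illustrative only):

	object	   = <fresh aligned temporary of TYPE>
	blk_object = adjust_address (object, BLKmode, 0)
	emit_move_insn (object, target)    -- skipped if whole width
	store_field (blk_object, ...)	   -- recursive BLKmode store
	emit_move_insn (target, object)    -- copy the result back
*/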
5415 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5419 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
5421 rtx blk_object = adjust_address (object, BLKmode, 0);
5423 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
5424 emit_move_insn (object, target);
5426 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5429 emit_move_insn (target, object);
5431 /* We want to return the BLKmode version of the data. */
5435 if (GET_CODE (target) == CONCAT)
5437 /* We're storing into a struct containing a single __complex. */
5441 return store_expr (exp, target, 0);
5444 /* If the structure is in a register or if the component
5445 is a bit field, we cannot use addressing to access it.
5446 Use bit-field techniques or SUBREG to store in it. */
5448 if (mode == VOIDmode
5449 || (mode != BLKmode && ! direct_store[(int) mode]
5450 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5451 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
5452 || GET_CODE (target) == REG
5453 || GET_CODE (target) == SUBREG
5454 /* If the field isn't aligned enough to store as an ordinary memref,
5455 store it as a bit field. */
5456 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5457 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
5458 || bitpos % GET_MODE_ALIGNMENT (mode)))
5459 /* If the RHS and field are a constant size and the size of the
5460 RHS isn't the same size as the bitfield, we must use bitfield
5463 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5464 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
5466 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
5468 /* If BITSIZE is narrower than the size of the type of EXP
5469 we will be narrowing TEMP. Normally, what's wanted are the
5470 low-order bits. However, if EXP's type is a record and this is
5471 a big-endian machine, we want the upper BITSIZE bits.
5472 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5473 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
5474 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5475 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5476 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5480 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5481 MODE. */
5482 if (mode != VOIDmode && mode != BLKmode
5483 && mode != TYPE_MODE (TREE_TYPE (exp)))
5484 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5486 /* If the modes of TARGET and TEMP are both BLKmode, both
5487 must be in memory and BITPOS must be aligned on a byte
5488 boundary. If so, we simply do a block copy. */
5489 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5491 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5492 || bitpos % BITS_PER_UNIT != 0)
5495 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
5496 emit_block_move (target, temp,
5497 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5501 return value_mode == VOIDmode ? const0_rtx : target;
5504 /* Store the value in the bitfield. */
5505 store_bit_field (target, bitsize, bitpos, mode, temp,
5506 int_size_in_bytes (type));
5508 if (value_mode != VOIDmode)
5510 /* The caller wants an rtx for the value.
5511 If possible, avoid refetching from the bitfield itself. */
5513 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5516 enum machine_mode tmode;
5518 tmode = GET_MODE (temp);
5519 if (tmode == VOIDmode)
5523 return expand_and (tmode, temp,
5524 gen_int_mode (width_mask, tmode),
5527 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5528 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5529 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5532 return extract_bit_field (target, bitsize, bitpos, unsignedp,
5533 NULL_RTX, value_mode, VOIDmode,
5534 int_size_in_bytes (type));
5540 rtx addr = XEXP (target, 0);
5541 rtx to_rtx = target;
5543 /* If a value is wanted, it must be the lhs;
5544 so make the address stable for multiple use. */
5546 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5547 && ! CONSTANT_ADDRESS_P (addr)
5548 /* A frame-pointer reference is already stable. */
5549 && ! (GET_CODE (addr) == PLUS
5550 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5551 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5552 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5553 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
5555 /* Now build a reference to just the desired component. */
5557 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5559 if (to_rtx == target)
5560 to_rtx = copy_rtx (to_rtx);
5562 MEM_SET_IN_STRUCT_P (to_rtx, 1);
5563 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
5564 set_mem_alias_set (to_rtx, alias_set);
5566 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5570 /* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5571 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5572 codes and find the ultimate containing object, which we return.
5574 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5575 bit position, and *PUNSIGNEDP to the signedness of the field.
5576 If the position of the field is variable, we store a tree
5577 giving the variable offset (in units) in *POFFSET.
5578 This offset is in addition to the bit position.
5579 If the position is not variable, we store 0 in *POFFSET.
5581 If any of the extraction expressions is volatile,
5582 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5584 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5585 is a mode that can be used to access the field. In that case, *PBITSIZE
5588 If the field describes a variable-sized object, *PMODE is set to
5589 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
5590 this case, but the address of the object can be found. */
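/* Added note -- illustrative example (assumed layout): for a
   reference s.f, where field F is a non-bit-field 32-bit integer at
   byte offset 4 of S, this returns the tree for S and sets
   *PBITSIZE == 32, *PBITPOS == 32, *POFFSET == 0 and *PMODE to the
   field's mode.  */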
5593 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
5594 punsignedp, pvolatilep)
5596 HOST_WIDE_INT *pbitsize;
5597 HOST_WIDE_INT *pbitpos;
5599 enum machine_mode *pmode;
5604 enum machine_mode mode = VOIDmode;
5605 tree offset = size_zero_node;
5606 tree bit_offset = bitsize_zero_node;
5607 tree placeholder_ptr = 0;
5610 /* First get the mode, signedness, and size. We do this from just the
5611 outermost expression. */
5612 if (TREE_CODE (exp) == COMPONENT_REF)
5614 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5615 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5616 mode = DECL_MODE (TREE_OPERAND (exp, 1));
5618 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5620 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5622 size_tree = TREE_OPERAND (exp, 1);
5623 *punsignedp = TREE_UNSIGNED (exp);
5627 mode = TYPE_MODE (TREE_TYPE (exp));
5628 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5630 if (mode == BLKmode)
5631 size_tree = TYPE_SIZE (TREE_TYPE (exp));
5633 *pbitsize = GET_MODE_BITSIZE (mode);
5638 if (! host_integerp (size_tree, 1))
5639 mode = BLKmode, *pbitsize = -1;
5641 *pbitsize = tree_low_cst (size_tree, 1);
5644 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5645 and find the ultimate containing object. */
5648 if (TREE_CODE (exp) == BIT_FIELD_REF)
5649 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5650 else if (TREE_CODE (exp) == COMPONENT_REF)
5652 tree field = TREE_OPERAND (exp, 1);
5653 tree this_offset = DECL_FIELD_OFFSET (field);
5655 /* If this field hasn't been filled in yet, don't go
5656 past it. This should only happen when folding expressions
5657 made during type construction. */
5658 if (this_offset == 0)
5660 else if (! TREE_CONSTANT (this_offset)
5661 && contains_placeholder_p (this_offset))
5662 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
5664 offset = size_binop (PLUS_EXPR, offset, this_offset);
5665 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5666 DECL_FIELD_BIT_OFFSET (field));
5668 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
5671 else if (TREE_CODE (exp) == ARRAY_REF
5672 || TREE_CODE (exp) == ARRAY_RANGE_REF)
5674 tree index = TREE_OPERAND (exp, 1);
5675 tree array = TREE_OPERAND (exp, 0);
5676 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5677 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
5678 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
5680 /* We assume all arrays have sizes that are a multiple of a byte.
5681 First subtract the lower bound, if any, in the type of the
5682 index, then convert to sizetype and multiply by the size of the
5683 array element. */
5684 if (low_bound != 0 && ! integer_zerop (low_bound))
5685 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5688 /* If the index has a self-referential type, pass it to a
5689 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5690 component to one. */
5691 if (! TREE_CONSTANT (index)
5692 && contains_placeholder_p (index))
5693 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
5694 if (! TREE_CONSTANT (unit_size)
5695 && contains_placeholder_p (unit_size))
5696 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
5698 offset = size_binop (PLUS_EXPR, offset,
5699 size_binop (MULT_EXPR,
5700 convert (sizetype, index),
5704 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5706 tree new = find_placeholder (exp, &placeholder_ptr);
5708 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5709 We might have been called from tree optimization where we
5710 haven't set up an object yet. */
5718 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5719 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
5720 && ! ((TREE_CODE (exp) == NOP_EXPR
5721 || TREE_CODE (exp) == CONVERT_EXPR)
5722 && (TYPE_MODE (TREE_TYPE (exp))
5723 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5726 /* If any reference in the chain is volatile, the effect is volatile. */
5727 if (TREE_THIS_VOLATILE (exp))
5730 exp = TREE_OPERAND (exp, 0);
5733 /* If OFFSET is constant, see if we can return the whole thing as a
5734 constant bit position. Otherwise, split it up. */
5735 if (host_integerp (offset, 0)
5736 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5738 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5739 && host_integerp (tem, 0))
5740 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5742 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
5748 /* Return 1 if T is an expression that get_inner_reference handles. */
5751 handled_component_p (t)
5754 switch (TREE_CODE (t))
5759 case ARRAY_RANGE_REF:
5760 case NON_LVALUE_EXPR:
5761 case VIEW_CONVERT_EXPR:
5766 return (TYPE_MODE (TREE_TYPE (t))
5767 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5774 /* Given an rtx VALUE that may contain additions and multiplications, return
5775 an equivalent value that just refers to a register, memory, or constant.
5776 This is done by generating instructions to perform the arithmetic and
5777 returning a pseudo-register containing the value.
5779 The returned value may be a REG, SUBREG, MEM or constant. */
5782 force_operand (value, target)
5786 /* Use subtarget as the target for operand 0 of a binary operation. */
5787 rtx subtarget = get_subtarget (target);
5788 enum rtx_code code = GET_CODE (value);
5790 /* Check for a PIC address load. */
5791 if ((code == PLUS || code == MINUS)
5792 && XEXP (value, 0) == pic_offset_table_rtx
5793 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5794 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5795 || GET_CODE (XEXP (value, 1)) == CONST))
5798 subtarget = gen_reg_rtx (GET_MODE (value));
5799 emit_move_insn (subtarget, value);
5803 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
5806 target = gen_reg_rtx (GET_MODE (value));
5807 convert_move (target, force_operand (XEXP (value, 0), NULL),
5808 code == ZERO_EXTEND);
5812 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
5814 op2 = XEXP (value, 1);
5815 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
5817 if (code == MINUS && GET_CODE (op2) == CONST_INT)
5820 op2 = negate_rtx (GET_MODE (value), op2);
5823 /* Check for an addition with OP2 a constant integer and our first
5824 operand a PLUS of a virtual register and something else. In that
5825 case, we want to emit the sum of the virtual register and the
5826 constant first and then add the other value. This allows virtual
5827 register instantiation to simply modify the constant rather than
5828 creating another one around this addition. */
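/* Added note: an illustrative rtx shape (not from the original
   source) for the case handled below:

	(plus (plus (reg virtual-stack-vars) (reg I)) (const_int 4))

   is emitted as T = virtual-stack-vars + 4 followed by T + I, so
   that instantiation only has to adjust the constant.  */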
5829 if (code == PLUS && GET_CODE (op2) == CONST_INT
5830 && GET_CODE (XEXP (value, 0)) == PLUS
5831 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5832 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5833 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5835 rtx temp = expand_simple_binop (GET_MODE (value), code,
5836 XEXP (XEXP (value, 0), 0), op2,
5837 subtarget, 0, OPTAB_LIB_WIDEN);
5838 return expand_simple_binop (GET_MODE (value), code, temp,
5839 force_operand (XEXP (XEXP (value,
5841 target, 0, OPTAB_LIB_WIDEN);
5844 op1 = force_operand (XEXP (value, 0), subtarget);
5845 op2 = force_operand (op2, NULL_RTX);
5849 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5851 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5852 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5853 target, 1, OPTAB_LIB_WIDEN);
5855 return expand_divmod (0,
5856 FLOAT_MODE_P (GET_MODE (value))
5857 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5858 GET_MODE (value), op1, op2, target, 0);
5861 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5865 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5869 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5873 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5874 target, 0, OPTAB_LIB_WIDEN);
5877 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5878 target, 1, OPTAB_LIB_WIDEN);
5881 if (GET_RTX_CLASS (code) == '1')
5883 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5884 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
5887 #ifdef INSN_SCHEDULING
5888 /* On machines that have insn scheduling, we want all memory references to be
5889 explicit, so we need to deal with such paradoxical SUBREGs. */
5890 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5891 && (GET_MODE_SIZE (GET_MODE (value))
5892 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5894 = simplify_gen_subreg (GET_MODE (value),
5895 force_reg (GET_MODE (SUBREG_REG (value)),
5896 force_operand (SUBREG_REG (value),
5898 GET_MODE (SUBREG_REG (value)),
5899 SUBREG_BYTE (value));
5905 /* Subroutine of expand_expr: return nonzero iff there is no way that
5906 EXP can reference X, which is being modified. TOP_P is nonzero if this
5907 call is going to be used to determine whether we need a temporary
5908 for EXP, as opposed to a recursive call to this function.
5910 It is always safe for this routine to return zero since it merely
5911 searches for optimization opportunities. */
5914 safe_from_p (x, exp, top_p)
5921 static tree save_expr_list;
5924 /* If EXP has varying size, we MUST use a target since we currently
5925 have no way of allocating temporaries of variable size
5926 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5927 So we assume here that something at a higher level has prevented a
5928 clash. This is somewhat bogus, but the best we can do. Only
5929 do this when X is BLKmode and when we are at the top level. */
5930 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
5931 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5932 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5933 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5934 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5936 && GET_MODE (x) == BLKmode)
5937 /* If X is in the outgoing argument area, it is always safe. */
5938 || (GET_CODE (x) == MEM
5939 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5940 || (GET_CODE (XEXP (x, 0)) == PLUS
5941 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
5944 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5945 find the underlying pseudo. */
5946 if (GET_CODE (x) == SUBREG)
5949 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5953 /* A SAVE_EXPR might appear many times in the expression passed to the
5954 top-level safe_from_p call, and if it has a complex subexpression,
5955 examining it multiple times could result in a combinatorial explosion.
5956 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5957 with optimization took about 28 minutes to compile -- even though it was
5958 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5959 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5960 we have processed. Note that the only test of top_p was above. */
5969 rtn = safe_from_p (x, exp, 0);
5971 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5972 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5977 /* Now look at our tree code and possibly recurse. */
5978 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5981 exp_rtl = DECL_RTL_IF_SET (exp);
5988 if (TREE_CODE (exp) == TREE_LIST)
5989 return ((TREE_VALUE (exp) == 0
5990 || safe_from_p (x, TREE_VALUE (exp), 0))
5991 && (TREE_CHAIN (exp) == 0
5992 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5993 else if (TREE_CODE (exp) == ERROR_MARK)
5994 return 1; /* An already-visited SAVE_EXPR? */
5999 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6003 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6004 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
6008 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6009 the expression. If it is set, we conflict iff we are that rtx or
6010 both are in memory. Otherwise, we check all operands of the
6011 expression recursively. */
6013 switch (TREE_CODE (exp))
6016 /* If the operand is static or we are static, we can't conflict.
6017 Likewise if we don't conflict with the operand at all. */
6018 if (staticp (TREE_OPERAND (exp, 0))
6019 || TREE_STATIC (exp)
6020 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6023 /* Otherwise, the only way this can conflict is if we are taking
6024 the address of a DECL whose address is part of X, which is
6025 very rare. */
6026 exp = TREE_OPERAND (exp, 0);
6029 if (!DECL_RTL_SET_P (exp)
6030 || GET_CODE (DECL_RTL (exp)) != MEM)
6033 exp_rtl = XEXP (DECL_RTL (exp), 0);
6038 if (GET_CODE (x) == MEM
6039 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6040 get_alias_set (exp)))
6045 /* Assume that the call will clobber all hard registers and
6046 all of memory. */
6047 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6048 || GET_CODE (x) == MEM)
6053 /* If a sequence exists, we would have to scan every instruction
6054 in the sequence to see if it was safe. This is probably not
6055 worthwhile. */
6056 if (RTL_EXPR_SEQUENCE (exp))
6059 exp_rtl = RTL_EXPR_RTL (exp);
6062 case WITH_CLEANUP_EXPR:
6063 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
6066 case CLEANUP_POINT_EXPR:
6067 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
6070 exp_rtl = SAVE_EXPR_RTL (exp);
6074 /* If we've already scanned this, don't do it again. Otherwise,
6075 show we've scanned it and record for clearing the flag if we're
6076 going on. */
6077 if (TREE_PRIVATE (exp))
6080 TREE_PRIVATE (exp) = 1;
6081 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6083 TREE_PRIVATE (exp) = 0;
6087 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
6091 /* The only operand we look at is operand 1. The rest aren't
6092 part of the expression. */
6093 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
6095 case METHOD_CALL_EXPR:
6096 /* This takes an rtx argument, but shouldn't appear here. */
6103 /* If we have an rtx, we do not need to scan our operands. */
6107 nops = first_rtl_op (TREE_CODE (exp));
6108 for (i = 0; i < nops; i++)
6109 if (TREE_OPERAND (exp, i) != 0
6110 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
6113 /* If this is a language-specific tree code, it may require
6114 special handling. */
6115 if ((unsigned int) TREE_CODE (exp)
6116 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
6117 && !(*lang_hooks.safe_from_p) (x, exp))
6121 /* If we have an rtl, find any enclosed object. Then see if we conflict
6122 with it. */
6125 if (GET_CODE (exp_rtl) == SUBREG)
6127 exp_rtl = SUBREG_REG (exp_rtl);
6128 if (GET_CODE (exp_rtl) == REG
6129 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6133 /* If the rtl is X, then it is not safe. Otherwise, it is safe unless both
6134 are memory and they conflict. */
6135 return ! (rtx_equal_p (x, exp_rtl)
6136 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
6137 && true_dependence (exp_rtl, VOIDmode, x,
6138 rtx_addr_varies_p)));
6141 /* If we reach here, it is safe. */
6145 /* Subroutine of expand_expr: return rtx if EXP is a
6146 variable or parameter; else return 0. */
6153 switch (TREE_CODE (exp))
6157 return DECL_RTL (exp);
6163 #ifdef MAX_INTEGER_COMPUTATION_MODE
6166 check_max_integer_computation_mode (exp)
6169 enum tree_code code;
6170 enum machine_mode mode;
6172 /* Strip any NOPs that don't change the mode. */
6174 code = TREE_CODE (exp);
6176 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6177 if (code == NOP_EXPR
6178 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6181 /* First check the type of the overall operation. We need only look at
6182 unary, binary and relational operations. */
6183 if (TREE_CODE_CLASS (code) == '1'
6184 || TREE_CODE_CLASS (code) == '2'
6185 || TREE_CODE_CLASS (code) == '<')
6187 mode = TYPE_MODE (TREE_TYPE (exp));
6188 if (GET_MODE_CLASS (mode) == MODE_INT
6189 && mode > MAX_INTEGER_COMPUTATION_MODE)
6190 internal_error ("unsupported wide integer operation");
6193 /* Check operand of a unary op. */
6194 if (TREE_CODE_CLASS (code) == '1')
6196 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6197 if (GET_MODE_CLASS (mode) == MODE_INT
6198 && mode > MAX_INTEGER_COMPUTATION_MODE)
6199 internal_error ("unsupported wide integer operation");
6202 /* Check operands of a binary/comparison op. */
6203 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6205 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6206 if (GET_MODE_CLASS (mode) == MODE_INT
6207 && mode > MAX_INTEGER_COMPUTATION_MODE)
6208 internal_error ("unsupported wide integer operation");
6210 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6211 if (GET_MODE_CLASS (mode) == MODE_INT
6212 && mode > MAX_INTEGER_COMPUTATION_MODE)
6213 internal_error ("unsupported wide integer operation");
6218 /* Return the highest power of two that EXP is known to be a multiple of.
6219 This is used in updating alignment of MEMs in array references. */
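/* Added note -- a worked example (illustrative): for the expression
   I * 12 + 4, the MULT case multiplies the factors of its operands
   (an unknown factor contributes 1, and 12 contributes its lowest
   set bit, 4), the constant 4 contributes 4, and the PLUS case takes
   MIN (4, 4) == 4, so a MEM indexed this way is known to be 4-byte
   aligned.  */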
6221 static HOST_WIDE_INT
6222 highest_pow2_factor (exp)
6225 HOST_WIDE_INT c0, c1;
6227 switch (TREE_CODE (exp))
6230 /* We can find the lowest bit that's a one. If the low
6231 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6232 We need to handle this case since we can find it in a COND_EXPR,
6233 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6234 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
6235 later ICE. */
6236 if (TREE_CONSTANT_OVERFLOW (exp))
6237 return BIGGEST_ALIGNMENT;
6240 /* Note: tree_low_cst is intentionally not used here,
6241 we don't care about the upper bits. */
6242 c0 = TREE_INT_CST_LOW (exp);
6243 c0 &= -c0;
6244 return c0 ? c0 : BIGGEST_ALIGNMENT;
6248 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
6249 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6250 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6251 return MIN (c0, c1);
6253 case MULT_EXPR:
6254 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6255 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6256 return c0 * c1;
6258 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6260 if (integer_pow2p (TREE_OPERAND (exp, 1))
6261 && host_integerp (TREE_OPERAND (exp, 1), 1))
6263 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6264 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6265 return MAX (1, c0 / c1);
6269 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
6270 case SAVE_EXPR: case WITH_RECORD_EXPR:
6271 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6274 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6277 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6278 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6279 return MIN (c0, c1);
6288 /* Similar, except that it is known that the expression must be a multiple
6289 of the alignment of TYPE. */
6291 static HOST_WIDE_INT
6292 highest_pow2_factor_for_type (type, exp)
6296 HOST_WIDE_INT type_align, factor;
6298 factor = highest_pow2_factor (exp);
6299 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6300 return MAX (factor, type_align);
6303 /* Return an object on the placeholder list that matches EXP, a
6304 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
6305 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6306 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6307 is a location which initially points to a starting location in the
6308 placeholder list (zero means start of the list) and where a pointer into
6309 the placeholder list at which the object is found is placed. */
6312 find_placeholder (exp, plist)
6316 tree type = TREE_TYPE (exp);
6317 tree placeholder_expr;
6319 for (placeholder_expr
6320 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6321 placeholder_expr != 0;
6322 placeholder_expr = TREE_CHAIN (placeholder_expr))
6324 tree need_type = TYPE_MAIN_VARIANT (type);
6327 /* Find the outermost reference that is of the type we want. If none,
6328 see if any object has a type that is a pointer to the type we
6329 want. */
6330 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6331 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6332 || TREE_CODE (elt) == COND_EXPR)
6333 ? TREE_OPERAND (elt, 1)
6334 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6335 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6336 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6337 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6338 ? TREE_OPERAND (elt, 0) : 0))
6339 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6342 *plist = placeholder_expr;
6346 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6348 = ((TREE_CODE (elt) == COMPOUND_EXPR
6349 || TREE_CODE (elt) == COND_EXPR)
6350 ? TREE_OPERAND (elt, 1)
6351 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6352 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6353 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6354 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6355 ? TREE_OPERAND (elt, 0) : 0))
6356 if (POINTER_TYPE_P (TREE_TYPE (elt))
6357 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6361 *plist = placeholder_expr;
6362 return build1 (INDIRECT_REF, need_type, elt);
6369 /* expand_expr: generate code for computing expression EXP.
6370 An rtx for the computed value is returned. The value is never null.
6371 In the case of a void EXP, const0_rtx is returned.
6373 The value may be stored in TARGET if TARGET is nonzero.
6374 TARGET is just a suggestion; callers must assume that
6375 the rtx returned may not be the same as TARGET.
6377 If TARGET is CONST0_RTX, it means that the value will be ignored.
6379 If TMODE is not VOIDmode, it suggests generating the
6380 result in mode TMODE. But this is done only when convenient.
6381 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6382 TMODE is just a suggestion; callers must assume that
6383 the rtx returned may not have mode TMODE.
6385 Note that TARGET may have neither TMODE nor MODE. In that case, it
6386 probably will not be used.
6388 If MODIFIER is EXPAND_SUM then when EXP is an addition
6389 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6390 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6391 products as above, or REG or MEM, or constant.
6392 Ordinarily in such cases we would output mul or add instructions
6393 and then return a pseudo reg containing the sum.
6395 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6396 it also marks a label as absolutely required (it can't be dead).
6397 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6398 This is used for outputting expressions used in initializers.
6400 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6401 with a constant address even if that address is not normally legitimate.
6402 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
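/* A minimal sketch of a typical call (EXP stands for any tree the
   front end built; both the target and the mode are only suggestions):

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   The caller must use VAL, which may be a different rtx than any
   suggested target and may not have the suggested mode.  */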
6405 expand_expr (exp, target, tmode, modifier)
6408 enum machine_mode tmode;
6409 enum expand_modifier modifier;
6412 tree type = TREE_TYPE (exp);
6413 int unsignedp = TREE_UNSIGNED (type);
6414 enum machine_mode mode;
6415 enum tree_code code = TREE_CODE (exp);
6417 rtx subtarget, original_target;
6421 /* Handle ERROR_MARK before anybody tries to access its type. */
6422 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6424 op0 = CONST0_RTX (tmode);
6430 mode = TYPE_MODE (type);
6431 /* Use subtarget as the target for operand 0 of a binary operation. */
6432 subtarget = get_subtarget (target);
6433 original_target = target;
6434 ignore = (target == const0_rtx
6435 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6436 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6437 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6438 && TREE_CODE (type) == VOID_TYPE));
6440 /* If we are going to ignore this result, we need only do something
6441 if there is a side-effect somewhere in the expression. If there
6442 is, short-circuit the most common cases here. Note that we must
6443 not call expand_expr with anything but const0_rtx in case this
6444 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6448 if (! TREE_SIDE_EFFECTS (exp))
6451 /* Ensure we reference a volatile object even if value is ignored, but
6452 don't do this if all we are doing is taking its address. */
6453 if (TREE_THIS_VOLATILE (exp)
6454 && TREE_CODE (exp) != FUNCTION_DECL
6455 && mode != VOIDmode && mode != BLKmode
6456 && modifier != EXPAND_CONST_ADDRESS)
6458 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6459 if (GET_CODE (temp) == MEM)
6460 temp = copy_to_reg (temp);
6464 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6465 || code == INDIRECT_REF || code == BUFFER_REF)
6466 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6469 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6470 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6472 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6473 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6476 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6477 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6478 /* If the second operand has no side effects, just evaluate the first. */
6480 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6482 else if (code == BIT_FIELD_REF)
6484 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6485 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6486 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6493 #ifdef MAX_INTEGER_COMPUTATION_MODE
6494 /* Only check stuff here if the mode we want is different from the mode
6495 of the expression; if it's the same, check_max_integer_computation_mode
6496 will handle it. Do we really need to check this stuff at all? */
6499 && GET_MODE (target) != mode
6500 && TREE_CODE (exp) != INTEGER_CST
6501 && TREE_CODE (exp) != PARM_DECL
6502 && TREE_CODE (exp) != ARRAY_REF
6503 && TREE_CODE (exp) != ARRAY_RANGE_REF
6504 && TREE_CODE (exp) != COMPONENT_REF
6505 && TREE_CODE (exp) != BIT_FIELD_REF
6506 && TREE_CODE (exp) != INDIRECT_REF
6507 && TREE_CODE (exp) != CALL_EXPR
6508 && TREE_CODE (exp) != VAR_DECL
6509 && TREE_CODE (exp) != RTL_EXPR)
6511 enum machine_mode mode = GET_MODE (target);
6513 if (GET_MODE_CLASS (mode) == MODE_INT
6514 && mode > MAX_INTEGER_COMPUTATION_MODE)
6515 internal_error ("unsupported wide integer operation");
6519 && TREE_CODE (exp) != INTEGER_CST
6520 && TREE_CODE (exp) != PARM_DECL
6521 && TREE_CODE (exp) != ARRAY_REF
6522 && TREE_CODE (exp) != ARRAY_RANGE_REF
6523 && TREE_CODE (exp) != COMPONENT_REF
6524 && TREE_CODE (exp) != BIT_FIELD_REF
6525 && TREE_CODE (exp) != INDIRECT_REF
6526 && TREE_CODE (exp) != VAR_DECL
6527 && TREE_CODE (exp) != CALL_EXPR
6528 && TREE_CODE (exp) != RTL_EXPR
6529 && GET_MODE_CLASS (tmode) == MODE_INT
6530 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6531 internal_error ("unsupported wide integer operation");
6533 check_max_integer_computation_mode (exp);
6536 /* If we will do cse, generate all results into pseudo registers
6537 since 1) that allows cse to find more things
6538 and 2) otherwise cse could produce an insn the machine
6539 cannot support. An exception is a CONSTRUCTOR into a multi-word
6540 MEM: that's much more likely to be most efficient into the MEM. */
6542 if (! cse_not_expected && mode != BLKmode && target
6543 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6544 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD))
6551 tree function = decl_function_context (exp);
6552 /* Handle using a label in a containing function. */
6553 if (function != current_function_decl
6554 && function != inline_function_decl && function != 0)
6556 struct function *p = find_function_data (function);
6557 p->expr->x_forced_labels
6558 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6559 p->expr->x_forced_labels);
6563 if (modifier == EXPAND_INITIALIZER)
6564 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6569 temp = gen_rtx_MEM (FUNCTION_MODE,
6570 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6571 if (function != current_function_decl
6572 && function != inline_function_decl && function != 0)
6573 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6578 if (!DECL_RTL_SET_P (exp))
6580 error_with_decl (exp, "prior parameter's size depends on `%s'");
6581 return CONST0_RTX (mode);
6584 /* ... fall through ... */
6587 /* If a static var's type was incomplete when the decl was written,
6588 but the type is complete now, lay out the decl now. */
6589 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6590 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6592 rtx value = DECL_RTL_IF_SET (exp);
6594 layout_decl (exp, 0);
6596 /* If the RTL was already set, update its mode and memory attributes. */
6600 PUT_MODE (value, DECL_MODE (exp));
6601 SET_DECL_RTL (exp, 0);
6602 set_mem_attributes (value, exp, 1);
6603 SET_DECL_RTL (exp, value);
6607 /* ... fall through ... */
6611 if (DECL_RTL (exp) == 0)
6614 /* Ensure the variable is marked as used even if it doesn't go through
6615 a parser. If it hasn't been used yet, write out an external definition. */
6617 if (! TREE_USED (exp))
6619 assemble_external (exp);
6620 TREE_USED (exp) = 1;
6623 /* Show we haven't gotten RTL for this yet. */
6626 /* Handle variables inherited from containing functions. */
6627 context = decl_function_context (exp);
6629 /* We treat inline_function_decl as an alias for the current function
6630 because that is the inline function whose vars, types, etc.
6631 are being merged into the current function.
6632 See expand_inline_function. */
6634 if (context != 0 && context != current_function_decl
6635 && context != inline_function_decl
6636 /* If var is static, we don't need a static chain to access it. */
6637 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6638 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6642 /* Mark as non-local and addressable. */
6643 DECL_NONLOCAL (exp) = 1;
6644 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6646 (*lang_hooks.mark_addressable) (exp);
6647 if (GET_CODE (DECL_RTL (exp)) != MEM)
6649 addr = XEXP (DECL_RTL (exp), 0);
6650 if (GET_CODE (addr) == MEM)
6652 = replace_equiv_address (addr,
6653 fix_lexical_addr (XEXP (addr, 0), exp));
6655 addr = fix_lexical_addr (addr, exp);
6657 temp = replace_equiv_address (DECL_RTL (exp), addr);
6660 /* This is the case of an array whose size is to be determined
6661 from its initializer, while the initializer is still being parsed. See expand_decl. */
6664 else if (GET_CODE (DECL_RTL (exp)) == MEM
6665 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6666 temp = validize_mem (DECL_RTL (exp));
6668 /* If DECL_RTL is memory, we are in the normal case and either
6669 the address is not valid or it is not a register and -fforce-addr
6670 is specified, get the address into a register. */
6672 else if (GET_CODE (DECL_RTL (exp)) == MEM
6673 && modifier != EXPAND_CONST_ADDRESS
6674 && modifier != EXPAND_SUM
6675 && modifier != EXPAND_INITIALIZER
6676 && (! memory_address_p (DECL_MODE (exp),
6677 XEXP (DECL_RTL (exp), 0))
6679 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6680 temp = replace_equiv_address (DECL_RTL (exp),
6681 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6683 /* If we got something, return it. But first, set the alignment
6684 if the address is a register. */
6687 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6688 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6693 /* If the mode of DECL_RTL does not match that of the decl, it
6694 must be a promoted value. We return a SUBREG of the wanted mode,
6695 but mark it so that we know that it was already extended. */
6697 if (GET_CODE (DECL_RTL (exp)) == REG
6698 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6700 /* Get the signedness used for this variable. Ensure we get the
6701 same mode we got when the variable was declared. */
6702 if (GET_MODE (DECL_RTL (exp))
6703 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6704 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6707 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6708 SUBREG_PROMOTED_VAR_P (temp) = 1;
6709 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6713 return DECL_RTL (exp);
6716 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6717 TREE_INT_CST_HIGH (exp), mode);
6719 /* ??? If overflow is set, fold will have done an incomplete job,
6720 which can result in (plus xx (const_int 0)), which can get
6721 simplified by validate_replace_rtx during virtual register
6722 instantiation, which can result in unrecognizable insns.
6723 Avoid this by forcing all overflows into registers. */
6724 if (TREE_CONSTANT_OVERFLOW (exp)
6725 && modifier != EXPAND_INITIALIZER)
6726 temp = force_reg (mode, temp);
6731 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
6734 /* If optimized, generate immediate CONST_DOUBLE
6735 which will be turned into memory by reload if necessary.
6737 We used to force a register so that loop.c could see it. But
6738 this does not allow gen_* patterns to perform optimizations with
6739 the constants. It also produces two insns in cases like "x = 1.0;".
6740 On most machines, floating-point constants are not permitted in
6741 many insns, so we'd end up copying it to a register in any case.
6743 Now, we do the copying in expand_binop, if appropriate. */
6744 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6745 TYPE_MODE (TREE_TYPE (exp)));
6749 if (! TREE_CST_RTL (exp))
6750 output_constant_def (exp, 1);
6752 /* TREE_CST_RTL probably contains a constant address.
6753 On RISC machines where a constant address isn't valid,
6754 make some insns to get that address into a register. */
6755 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6756 && modifier != EXPAND_CONST_ADDRESS
6757 && modifier != EXPAND_INITIALIZER
6758 && modifier != EXPAND_SUM
6759 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6761 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6762 return replace_equiv_address (TREE_CST_RTL (exp),
6763 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6764 return TREE_CST_RTL (exp);
6766 case EXPR_WITH_FILE_LOCATION:
6769 const char *saved_input_filename = input_filename;
6770 int saved_lineno = lineno;
6771 input_filename = EXPR_WFL_FILENAME (exp);
6772 lineno = EXPR_WFL_LINENO (exp);
6773 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6774 emit_line_note (input_filename, lineno);
6775 /* Possibly avoid switching back and forth here. */
6776 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6777 input_filename = saved_input_filename;
6778 lineno = saved_lineno;
6783 context = decl_function_context (exp);
6785 /* If this SAVE_EXPR was at global context, assume we are an
6786 initialization function and move it into our context. */
6788 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6790 /* We treat inline_function_decl as an alias for the current function
6791 because that is the inline function whose vars, types, etc.
6792 are being merged into the current function.
6793 See expand_inline_function. */
6794 if (context == current_function_decl || context == inline_function_decl)
6797 /* If this is non-local, handle it. */
6800 /* The following call just exists to abort if the context is
6801 not of a containing function. */
6802 find_function_data (context);
6804 temp = SAVE_EXPR_RTL (exp);
6805 if (temp && GET_CODE (temp) == REG)
6807 put_var_into_stack (exp);
6808 temp = SAVE_EXPR_RTL (exp);
6810 if (temp == 0 || GET_CODE (temp) != MEM)
6813 replace_equiv_address (temp,
6814 fix_lexical_addr (XEXP (temp, 0), exp));
6816 if (SAVE_EXPR_RTL (exp) == 0)
6818 if (mode == VOIDmode)
6821 temp = assign_temp (build_qualified_type (type,
6823 | TYPE_QUAL_CONST)),
6826 SAVE_EXPR_RTL (exp) = temp;
6827 if (!optimize && GET_CODE (temp) == REG)
6828 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6831 /* If the mode of TEMP does not match that of the expression, it
6832 must be a promoted value. We pass store_expr a SUBREG of the
6833 wanted mode but mark it so that we know that it was already
6834 extended. Note that `unsignedp' was modified above in this case. */
6837 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6839 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6840 SUBREG_PROMOTED_VAR_P (temp) = 1;
6841 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6844 if (temp == const0_rtx)
6845 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6847 store_expr (TREE_OPERAND (exp, 0), temp, 0);
6849 TREE_USED (exp) = 1;
6852 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6853 must be a promoted value. We return a SUBREG of the wanted mode,
6854 but mark it so that we know that it was already extended. */
6856 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6857 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6859 /* Compute the signedness and make the proper SUBREG. */
6860 promote_mode (type, mode, &unsignedp, 0);
6861 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6862 SUBREG_PROMOTED_VAR_P (temp) = 1;
6863 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6867 return SAVE_EXPR_RTL (exp);
6872 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6873 TREE_OPERAND (exp, 0)
6874 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6878 case PLACEHOLDER_EXPR:
6880 tree old_list = placeholder_list;
6881 tree placeholder_expr = 0;
6883 exp = find_placeholder (exp, &placeholder_expr);
6887 placeholder_list = TREE_CHAIN (placeholder_expr);
6888 temp = expand_expr (exp, original_target, tmode, modifier);
6889 placeholder_list = old_list;
6893 case WITH_RECORD_EXPR:
6894 /* Put the object on the placeholder list, expand our first operand,
6895 and pop the list. */
6896 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6898 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
6900 placeholder_list = TREE_CHAIN (placeholder_list);
6904 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6905 expand_goto (TREE_OPERAND (exp, 0));
6907 expand_computed_goto (TREE_OPERAND (exp, 0));
6911 expand_exit_loop_if_false (NULL,
6912 invert_truthvalue (TREE_OPERAND (exp, 0)));
6915 case LABELED_BLOCK_EXPR:
6916 if (LABELED_BLOCK_BODY (exp))
6917 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
6918 /* Should perhaps use expand_label, but this is simpler and safer. */
6919 do_pending_stack_adjust ();
6920 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6923 case EXIT_BLOCK_EXPR:
6924 if (EXIT_BLOCK_RETURN (exp))
6925 sorry ("returned value in block_exit_expr");
6926 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6931 expand_start_loop (1);
6932 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
6940 tree vars = TREE_OPERAND (exp, 0);
6941 int vars_need_expansion = 0;
6943 /* Need to open a binding contour here because
6944 if there are any cleanups they must be contained here. */
6945 expand_start_bindings (2);
6947 /* Mark the corresponding BLOCK for output in its proper place. */
6948 if (TREE_OPERAND (exp, 2) != 0
6949 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6950 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
6952 /* If VARS have not yet been expanded, expand them now. */
6955 if (!DECL_RTL_SET_P (vars))
6957 vars_need_expansion = 1;
6960 expand_decl_init (vars);
6961 vars = TREE_CHAIN (vars);
6964 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
6966 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6972 if (RTL_EXPR_SEQUENCE (exp))
6974 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6976 emit_insn (RTL_EXPR_SEQUENCE (exp));
6977 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6979 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6980 free_temps_for_rtl_expr (exp);
6981 return RTL_EXPR_RTL (exp);
6984 /* If we don't need the result, just ensure we evaluate any subexpressions. */
6990 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6991 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
6996 /* All elts simple constants => refer to a constant in memory. But
6997 if this is a non-BLKmode mode, let it store a field at a time
6998 since that should make a CONST_INT or CONST_DOUBLE when we
6999 fold. Likewise, if we have a target we can use, it is best to
7000 store directly into the target unless the type is large enough
7001 that memcpy will be used. If we are making an initializer and
7002 all operands are constant, put it in memory as well.
7004 FIXME: Avoid trying to fill vector constructors piece-meal.
7005 Output them with output_constant_def below unless we're sure
7006 they're zeros. This should go away when vector initializers
7007 are treated like VECTOR_CST instead of arrays.
7009 else if ((TREE_STATIC (exp)
7010 && ((mode == BLKmode
7011 && ! (target != 0 && safe_from_p (target, exp, 1)))
7012 || TREE_ADDRESSABLE (exp)
7013 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7014 && (! MOVE_BY_PIECES_P
7015 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7017 && ((TREE_CODE (type) == VECTOR_TYPE
7018 && !is_zeros_p (exp))
7019 || ! mostly_zeros_p (exp)))))
7020 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7022 rtx constructor = output_constant_def (exp, 1);
7024 if (modifier != EXPAND_CONST_ADDRESS
7025 && modifier != EXPAND_INITIALIZER
7026 && modifier != EXPAND_SUM)
7027 constructor = validize_mem (constructor);
7033 /* Handle calls that pass values in multiple non-contiguous
7034 locations. The Irix 6 ABI has examples of this. */
7035 if (target == 0 || ! safe_from_p (target, exp, 1)
7036 || GET_CODE (target) == PARALLEL)
7038 = assign_temp (build_qualified_type (type,
7040 | (TREE_READONLY (exp)
7041 * TYPE_QUAL_CONST))),
7042 0, TREE_ADDRESSABLE (exp), 1);
7044 store_constructor (exp, target, 0, int_expr_size (exp));
7050 tree exp1 = TREE_OPERAND (exp, 0);
7052 tree string = string_constant (exp1, &index);
7054 /* Try to optimize reads from const strings. */
7056 && TREE_CODE (string) == STRING_CST
7057 && TREE_CODE (index) == INTEGER_CST
7058 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7059 && GET_MODE_CLASS (mode) == MODE_INT
7060 && GET_MODE_SIZE (mode) == 1
7061 && modifier != EXPAND_WRITE)
7062 return gen_int_mode (TREE_STRING_POINTER (string)
7063 [TREE_INT_CST_LOW (index)], mode);
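/* For illustration: this fold turns a constant read from a string
   literal, e.g. the C source

     char c = "abc"[1];

   into an immediate: STRING is "abc", INDEX is 1, and the result is
   simply gen_int_mode ('b', mode), with no memory reference emitted.  */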
7065 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7066 op0 = memory_address (mode, op0);
7067 temp = gen_rtx_MEM (mode, op0);
7068 set_mem_attributes (temp, exp, 0);
7070 /* If we are writing to this object and its type is a record with
7071 readonly fields, we must mark it as readonly so it will
7072 conflict with readonly references to those fields. */
7073 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7074 RTX_UNCHANGING_P (temp) = 1;
7080 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7084 tree array = TREE_OPERAND (exp, 0);
7085 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7086 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7087 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7090 /* Optimize the special-case of a zero lower bound.
7092 We convert the low_bound to sizetype to avoid some problems
7093 with constant folding. (E.g. suppose the lower bound is 1,
7094 and its mode is QI. Without the conversion, (ARRAY
7095 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7096 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7098 if (! integer_zerop (low_bound))
7099 index = size_diffop (index, convert (sizetype, low_bound));
7101 /* Fold an expression like: "foo"[2].
7102 This is not done in fold so it won't happen inside &.
7103 Don't fold if this is for wide characters since it's too
7104 difficult to do correctly and this is a very rare case. */
7106 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7107 && TREE_CODE (array) == STRING_CST
7108 && TREE_CODE (index) == INTEGER_CST
7109 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7110 && GET_MODE_CLASS (mode) == MODE_INT
7111 && GET_MODE_SIZE (mode) == 1)
7112 return gen_int_mode (TREE_STRING_POINTER (array)
7113 [TREE_INT_CST_LOW (index)], mode);
7115 /* If this is a constant index into a constant array,
7116 just get the value from the array. Handle both the cases when
7117 we have an explicit constructor and when our operand is a variable
7118 that was declared const. */
7120 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7121 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7122 && TREE_CODE (index) == INTEGER_CST
7123 && 0 > compare_tree_int (index,
7124 list_length (CONSTRUCTOR_ELTS
7125 (TREE_OPERAND (exp, 0)))))
7129 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7130 i = TREE_INT_CST_LOW (index);
7131 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7135 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7139 else if (optimize >= 1
7140 && modifier != EXPAND_CONST_ADDRESS
7141 && modifier != EXPAND_INITIALIZER
7142 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7143 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7144 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7146 if (TREE_CODE (index) == INTEGER_CST)
7148 tree init = DECL_INITIAL (array);
7150 if (TREE_CODE (init) == CONSTRUCTOR)
7154 for (elem = CONSTRUCTOR_ELTS (init);
7156 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7157 elem = TREE_CHAIN (elem))
7160 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
7161 return expand_expr (fold (TREE_VALUE (elem)), target,
7164 else if (TREE_CODE (init) == STRING_CST
7165 && 0 > compare_tree_int (index,
7166 TREE_STRING_LENGTH (init)))
7168 tree type = TREE_TYPE (TREE_TYPE (init));
7169 enum machine_mode mode = TYPE_MODE (type);
7171 if (GET_MODE_CLASS (mode) == MODE_INT
7172 && GET_MODE_SIZE (mode) == 1)
7173 return gen_int_mode (TREE_STRING_POINTER (init)
7174 [TREE_INT_CST_LOW (index)], mode);
7183 case ARRAY_RANGE_REF:
7184 /* If the operand is a CONSTRUCTOR, we can just extract the
7185 appropriate field if it is present. Don't do this if we have
7186 already written the data since we want to refer to that copy
7187 and varasm.c assumes that's what we'll do. */
7188 if (code == COMPONENT_REF
7189 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7190 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7194 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7195 elt = TREE_CHAIN (elt))
7196 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7197 /* We can normally use the value of the field in the
7198 CONSTRUCTOR. However, if this is a bitfield in
7199 an integral mode that we can fit in a HOST_WIDE_INT,
7200 we must mask only the number of bits in the bitfield,
7201 since this is done implicitly by the constructor. If
7202 the bitfield does not meet either of those conditions,
7203 we can't do this optimization. */
7204 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7205 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7207 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7208 <= HOST_BITS_PER_WIDE_INT))))
7210 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7211 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7213 HOST_WIDE_INT bitsize
7214 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7215 enum machine_mode imode
7216 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7218 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7220 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7221 op0 = expand_and (imode, op0, op1, target);
7226 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7229 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7231 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
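/* For illustration, the bitfield adjustment above for a 5-bit field
   in SImode (so GET_MODE_BITSIZE (imode) == 32 and count == 27):

     unsigned field:  op0 &= (1 << 5) - 1;        mask with 0x1f
     signed field:    op0 = (op0 << 27) >> 27;    arithmetic shift

   i.e. the expand_and masks, while the LSHIFT_EXPR/RSHIFT_EXPR pair
   sign-extends the field from bit 4.  */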
7241 enum machine_mode mode1;
7242 HOST_WIDE_INT bitsize, bitpos;
7245 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7246 &mode1, &unsignedp, &volatilep);
7249 /* If we got back the original object, something is wrong. Perhaps
7250 we are evaluating an expression too early. In any event, don't
7251 infinitely recurse. */
7255 /* If TEM's type is a union of variable size, pass TARGET to the inner
7256 computation, since it will need a temporary and TARGET is known
7257 to be usable. This occurs in unchecked conversion in Ada. */
7261 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7262 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7264 ? target : NULL_RTX),
7266 (modifier == EXPAND_INITIALIZER
7267 || modifier == EXPAND_CONST_ADDRESS)
7268 ? modifier : EXPAND_NORMAL);
7270 /* If this is a constant, put it into a register if it is a
7271 legitimate constant and OFFSET is 0, and into memory if it isn't. */
7272 if (CONSTANT_P (op0))
7274 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7275 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7277 op0 = force_reg (mode, op0);
7279 op0 = validize_mem (force_const_mem (mode, op0));
7284 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7286 /* If this object is in a register, put it into memory.
7287 This case can't occur in C, but can in Ada if we have
7288 unchecked conversion of an expression from a scalar type to
7289 an array or record type. */
7290 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7291 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7293 /* If the operand is a SAVE_EXPR, we can deal with this by
7294 forcing the SAVE_EXPR into memory. */
7295 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
7297 put_var_into_stack (TREE_OPERAND (exp, 0));
7298 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7303 = build_qualified_type (TREE_TYPE (tem),
7304 (TYPE_QUALS (TREE_TYPE (tem))
7305 | TYPE_QUAL_CONST));
7306 rtx memloc = assign_temp (nt, 1, 1, 1);
7308 emit_move_insn (memloc, op0);
7313 if (GET_CODE (op0) != MEM)
7316 #ifdef POINTERS_EXTEND_UNSIGNED
7317 if (GET_MODE (offset_rtx) != Pmode)
7318 offset_rtx = convert_memory_address (Pmode, offset_rtx);
7320 if (GET_MODE (offset_rtx) != ptr_mode)
7321 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7324 /* A constant address in OP0 can have VOIDmode; we must not try
7325 to call force_reg in that case, so avoid it. */
7326 if (GET_CODE (op0) == MEM
7327 && GET_MODE (op0) == BLKmode
7328 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7330 && (bitpos % bitsize) == 0
7331 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7332 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7334 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7338 op0 = offset_address (op0, offset_rtx,
7339 highest_pow2_factor (offset));
7342 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7343 record its alignment as BIGGEST_ALIGNMENT. */
7344 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7345 && is_aligning_offset (offset, tem))
7346 set_mem_align (op0, BIGGEST_ALIGNMENT);
7348 /* Don't forget about volatility even if this is a bitfield. */
7349 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7351 if (op0 == orig_op0)
7352 op0 = copy_rtx (op0);
7354 MEM_VOLATILE_P (op0) = 1;
7357 /* The following code doesn't handle CONCAT.
7358 Assume only bitpos == 0 can be used for CONCAT, due to
7359 one-element arrays having the same mode as their element. */
7360 if (GET_CODE (op0) == CONCAT)
7362 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7367 /* In cases where an aligned union has an unaligned object
7368 as a field, we might be extracting a BLKmode value from
7369 an integer-mode (e.g., SImode) object. Handle this case
7370 by doing the extract into an object as wide as the field
7371 (which we know to be the width of a basic mode), then
7372 storing into memory, and changing the mode to BLKmode. */
7373 if (mode1 == VOIDmode
7374 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7375 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7376 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7377 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7378 && modifier != EXPAND_CONST_ADDRESS
7379 && modifier != EXPAND_INITIALIZER)
7380 /* If the field isn't aligned enough to fetch as a memref,
7381 fetch it as a bit field. */
7382 || (mode1 != BLKmode
7383 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7384 && ((TYPE_ALIGN (TREE_TYPE (tem))
7385 < GET_MODE_ALIGNMENT (mode))
7386 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7387 /* If the type and the field are a constant size and the
7388 size of the type isn't the same size as the bitfield,
7389 we must use bitfield operations. */
7391 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7393 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7396 enum machine_mode ext_mode = mode;
7398 if (ext_mode == BLKmode
7399 && ! (target != 0 && GET_CODE (op0) == MEM
7400 && GET_CODE (target) == MEM
7401 && bitpos % BITS_PER_UNIT == 0))
7402 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7404 if (ext_mode == BLKmode)
7406 /* In this case, BITPOS must start at a byte boundary and
7407 TARGET, if specified, must be a MEM. */
7408 if (GET_CODE (op0) != MEM
7409 || (target != 0 && GET_CODE (target) != MEM)
7410 || bitpos % BITS_PER_UNIT != 0)
7413 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7415 target = assign_temp (type, 0, 1, 1);
7417 emit_block_move (target, op0,
7418 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7425 op0 = validize_mem (op0);
7427 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7428 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7430 op0 = extract_bit_field (op0, bitsize, bitpos,
7431 unsignedp, target, ext_mode, ext_mode,
7432 int_size_in_bytes (TREE_TYPE (tem)));
7434 /* If the result is a record type and BITSIZE is narrower than
7435 the mode of OP0, an integral mode, and this is a big endian
7436 machine, we must put the field into the high-order bits. */
7437 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7438 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7439 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7440 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7441 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7445 if (mode == BLKmode)
7447 rtx new = assign_temp (build_qualified_type
7448 ((*lang_hooks.types.type_for_mode)
7450 TYPE_QUAL_CONST), 0, 1, 1);
7452 emit_move_insn (new, op0);
7453 op0 = copy_rtx (new);
7454 PUT_MODE (op0, BLKmode);
7455 set_mem_attributes (op0, exp, 1);
7461 /* If the result is BLKmode, use that to access the object now as well. */
7463 if (mode == BLKmode)
7466 /* Get a reference to just this component. */
7467 if (modifier == EXPAND_CONST_ADDRESS
7468 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7469 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7471 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7473 if (op0 == orig_op0)
7474 op0 = copy_rtx (op0);
7476 set_mem_attributes (op0, exp, 0);
7477 if (GET_CODE (XEXP (op0, 0)) == REG)
7478 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7480 MEM_VOLATILE_P (op0) |= volatilep;
7481 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7482 || modifier == EXPAND_CONST_ADDRESS
7483 || modifier == EXPAND_INITIALIZER)
7485 else if (target == 0)
7486 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7488 convert_move (target, op0, unsignedp);
7494 rtx insn, before = get_last_insn (), vtbl_ref;
7496 /* Evaluate the interior expression. */
7497 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7500 /* Get or create an instruction off which to hang a note. */
7501 if (REG_P (subtarget))
7504 insn = get_last_insn ();
7507 if (! INSN_P (insn))
7508 insn = prev_nonnote_insn (insn);
7512 target = gen_reg_rtx (GET_MODE (subtarget));
7513 insn = emit_move_insn (target, subtarget);
7516 /* Collect the data for the note. */
7517 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7518 vtbl_ref = plus_constant (vtbl_ref,
7519 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7520 /* Discard the initial CONST that was added. */
7521 vtbl_ref = XEXP (vtbl_ref, 0);
7524 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7529 /* Intended for a reference to a buffer of a file-object in Pascal.
7530 But it's not certain that a special tree code will really be
7531 necessary for these. INDIRECT_REF might work for them. */
7537 /* Pascal set IN expression.
7540 rlo = set_low - (set_low%bits_per_word);
7541 the_word = set [ (index - rlo)/bits_per_word ];
7542 bit_index = index % bits_per_word;
7543 bitmask = 1 << bit_index;
7544 return !!(the_word & bitmask); */
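/* In rough C, with hypothetical names, the test being open-coded here
   follows the algorithm above, byte-wise rather than word-wise:

     unsigned char *set_bytes = ...;    address of the set object
     unsigned char word = set_bytes[(index - rlow) / BITS_PER_UNIT];
     return (word >> (index % BITS_PER_UNIT)) & 1;

   where rlow, computed below, is the bit number of the low bit of the
   set's first byte.  */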
7546 tree set = TREE_OPERAND (exp, 0);
7547 tree index = TREE_OPERAND (exp, 1);
7548 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7549 tree set_type = TREE_TYPE (set);
7550 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7551 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7552 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7553 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7554 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7555 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7556 rtx setaddr = XEXP (setval, 0);
7557 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7559 rtx diff, quo, rem, addr, bit, result;
7561 /* If domain is empty, answer is no. Likewise if index is constant
7562 and out of bounds. */
7563 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7564 && TREE_CODE (set_low_bound) == INTEGER_CST
7565 && tree_int_cst_lt (set_high_bound, set_low_bound))
7566 || (TREE_CODE (index) == INTEGER_CST
7567 && TREE_CODE (set_low_bound) == INTEGER_CST
7568 && tree_int_cst_lt (index, set_low_bound))
7569 || (TREE_CODE (set_high_bound) == INTEGER_CST
7570 && TREE_CODE (index) == INTEGER_CST
7571 && tree_int_cst_lt (set_high_bound, index))))
7575 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7577 /* If we get here, we have to generate the code for both cases
7578 (in range and out of range). */
7580 op0 = gen_label_rtx ();
7581 op1 = gen_label_rtx ();
7583 if (! (GET_CODE (index_val) == CONST_INT
7584 && GET_CODE (lo_r) == CONST_INT))
7585 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7586 GET_MODE (index_val), iunsignedp, op1);
7588 if (! (GET_CODE (index_val) == CONST_INT
7589 && GET_CODE (hi_r) == CONST_INT))
7590 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7591 GET_MODE (index_val), iunsignedp, op1);
7593 /* Calculate the element number of bit zero in the first word of the set. */
7595 if (GET_CODE (lo_r) == CONST_INT)
7596 rlow = GEN_INT (INTVAL (lo_r)
7597 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7599 rlow = expand_binop (index_mode, and_optab, lo_r,
7600 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7601 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7603 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7604 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7606 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7607 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7608 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7609 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7611 addr = memory_address (byte_mode,
7612 expand_binop (index_mode, add_optab, diff,
7613 setaddr, NULL_RTX, iunsignedp,
7616 /* Extract the bit we want to examine. */
7617 bit = expand_shift (RSHIFT_EXPR, byte_mode,
7618 gen_rtx_MEM (byte_mode, addr),
7619 make_tree (TREE_TYPE (index), rem),
7621 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7622 GET_MODE (target) == byte_mode ? target : 0,
7623 1, OPTAB_LIB_WIDEN);
7625 if (result != target)
7626 convert_move (target, result, 1);
7628 /* Output the code to handle the out-of-range case. */
7631 emit_move_insn (target, const0_rtx);
7636 case WITH_CLEANUP_EXPR:
7637 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
7639 WITH_CLEANUP_EXPR_RTL (exp)
7640 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7641 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7642 CLEANUP_EH_ONLY (exp));
7644 /* That's it for this cleanup. */
7645 TREE_OPERAND (exp, 1) = 0;
7647 return WITH_CLEANUP_EXPR_RTL (exp);
7649 case CLEANUP_POINT_EXPR:
7651 /* Start a new binding layer that will keep track of all cleanup
7652 actions to be performed. */
7653 expand_start_bindings (2);
7655 target_temp_slot_level = temp_slot_level;
7657 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7658 /* If we're going to use this value, load it up now. */
7660 op0 = force_not_mem (op0);
7661 preserve_temp_slots (op0);
7662 expand_end_bindings (NULL_TREE, 0, 0);
7667 /* Check for a built-in function. */
7668 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7669 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7671 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7673 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7674 == BUILT_IN_FRONTEND)
7675 return (*lang_hooks.expand_expr)
7676 (exp, original_target, tmode, modifier);
7678 return expand_builtin (exp, target, subtarget, tmode, ignore);
7681 return expand_call (exp, target, ignore);
7683 case NON_LVALUE_EXPR:
7686 case REFERENCE_EXPR:
7687 if (TREE_OPERAND (exp, 0) == error_mark_node)
7690 if (TREE_CODE (type) == UNION_TYPE)
7692 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
7694 /* If both input and output are BLKmode, this conversion isn't doing
7695 anything except possibly changing memory attributes. */
7696 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7698 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7701 result = copy_rtx (result);
7702 set_mem_attributes (result, exp, 0);
7707 target = assign_temp (type, 0, 1, 1);
7709 if (GET_CODE (target) == MEM)
7710 /* Store data into beginning of memory target. */
7711 store_expr (TREE_OPERAND (exp, 0),
7712 adjust_address (target, TYPE_MODE (valtype), 0), 0);
7714 else if (GET_CODE (target) == REG)
7715 /* Store this field into a union of the proper type. */
7716 store_field (target,
7717 MIN ((int_size_in_bytes (TREE_TYPE
7718 (TREE_OPERAND (exp, 0)))
7720 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
7721 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7722 VOIDmode, 0, type, 0);
7726 /* Return the entire union. */
7730 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7732 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
7735 /* If the signedness of the conversion differs and OP0 is
7736 a promoted SUBREG, clear that indication since we now
7737 have to do the proper extension. */
7738 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7739 && GET_CODE (op0) == SUBREG)
7740 SUBREG_PROMOTED_VAR_P (op0) = 0;
7745 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7746 if (GET_MODE (op0) == mode)
7749 /* If OP0 is a constant, just convert it into the proper mode. */
7750 if (CONSTANT_P (op0))
7752 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7753 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7755 if (modifier == EXPAND_INITIALIZER)
7756 return simplify_gen_subreg (mode, op0, inner_mode,
7757 subreg_lowpart_offset (mode,
7760 return convert_modes (mode, inner_mode, op0,
7761 TREE_UNSIGNED (inner_type));
7764 if (modifier == EXPAND_INITIALIZER)
7765 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
7769 convert_to_mode (mode, op0,
7770 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7772 convert_move (target, op0,
7773 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7776 case VIEW_CONVERT_EXPR:
7777 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7779 /* If the input and output modes are both the same, we are done.
7780 Otherwise, if neither mode is BLKmode and both are within a word, we
7781 can use gen_lowpart. If neither is true, make sure the operand is
7782 in memory and convert the MEM to the new mode. */
7783 if (TYPE_MODE (type) == GET_MODE (op0))
7785 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7786 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7787 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7788 op0 = gen_lowpart (TYPE_MODE (type), op0);
7789 else if (GET_CODE (op0) != MEM)
7791 /* If the operand is not a MEM, force it into memory. Since we
7792 are going to be changing the mode of the MEM, don't call
7793 force_const_mem for constants because we don't allow pool
7794 constants to change mode. */
7795 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7797 if (TREE_ADDRESSABLE (exp))
7800 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7802 = assign_stack_temp_for_type
7803 (TYPE_MODE (inner_type),
7804 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
7806 emit_move_insn (target, op0);
7810 /* At this point, OP0 is in the correct mode. If the output type is such
7811 that the operand is known to be aligned, indicate that it is.
7812 Otherwise, we need only be concerned about alignment for non-BLKmode results. */
7814 if (GET_CODE (op0) == MEM)
7816 op0 = copy_rtx (op0);
7818 if (TYPE_ALIGN_OK (type))
7819 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7820 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7821 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7823 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7824 HOST_WIDE_INT temp_size
7825 = MAX (int_size_in_bytes (inner_type),
7826 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
7827 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7828 temp_size, 0, type);
7829 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
7831 if (TREE_ADDRESSABLE (exp))
7834 if (GET_MODE (op0) == BLKmode)
7835 emit_block_move (new_with_op0_mode, op0,
7836 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
7839 emit_move_insn (new_with_op0_mode, op0);
7844 op0 = adjust_address (op0, TYPE_MODE (type), 0);
7850 this_optab = ! unsignedp && flag_trapv
7851 && (GET_MODE_CLASS (mode) == MODE_INT)
7852 ? addv_optab : add_optab;
7854 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7855 something else, make sure we add the register to the constant and
7856 then to the other thing. This case can occur during strength
7857 reduction and doing it this way will produce better code if the
7858 frame pointer or argument pointer is eliminated.
7860 fold-const.c will ensure that the constant is always in the inner
7861 PLUS_EXPR, so the only case we need to do anything about is if
7862 sp, ap, or fp is our second argument, in which case we must swap
7863 the innermost first argument and our second argument. */
7865 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7866 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7867 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7868 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7869 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7870 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7872 tree t = TREE_OPERAND (exp, 1);
7874 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7875 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7878 /* If the result is to be ptr_mode and we are adding an integer to
7879 something, we might be forming a constant. So try to use
7880 plus_constant. If it produces a sum and we can't accept it,
7881 use force_operand. This allows P = &ARR[const] to generate
7882 efficient code on machines where a SYMBOL_REF is not a valid address.
7885 If this is an EXPAND_SUM call, always return the sum. */
7886 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
7887 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
7889 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7890 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7891 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7895 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7897 /* Use immed_double_const to ensure that the constant is
7898 truncated according to the mode of OP1, then sign extended
7899 to a HOST_WIDE_INT. Using the constant directly can result
7900 in non-canonical RTL in a 64x32 cross compile. */
7902 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7904 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7905 op1 = plus_constant (op1, INTVAL (constant_part));
7906 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7907 op1 = force_operand (op1, target);
7911 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7912 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7913 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7917 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7918 (modifier == EXPAND_INITIALIZER
7919 ? EXPAND_INITIALIZER : EXPAND_SUM));
7920 if (! CONSTANT_P (op0))
7922 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7923 VOIDmode, modifier);
7924 /* Don't go to both_summands if modifier
7925 says it's not right to return a PLUS. */
7926 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7930 /* Use immed_double_const to ensure that the constant is
7931 truncated according to the mode of OP1, then sign extended
7932 to a HOST_WIDE_INT. Using the constant directly can result
7933 in non-canonical RTL in a 64x32 cross compile. */
7935 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7937 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7938 op0 = plus_constant (op0, INTVAL (constant_part));
7939 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7940 op0 = force_operand (op0, target);
7945 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7948 /* No sense saving up arithmetic to be done
7949 if it's all in the wrong mode to form part of an address.
7950 And force_operand won't know whether to sign-extend or zero-extend. */
7952 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7953 || mode != ptr_mode)
7955 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7956 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7957 if (op0 == const0_rtx)
7959 if (op1 == const0_rtx)
7964 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
7965 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
7967 /* We come here from MINUS_EXPR when the second operand is a constant. */
both_summands:
7970 /* Make sure any term that's a sum with a constant comes last. */
7971 if (GET_CODE (op0) == PLUS
7972 && CONSTANT_P (XEXP (op0, 1)))
7978 /* If adding to a sum including a constant,
7979 associate it to put the constant outside. */
7980 if (GET_CODE (op1) == PLUS
7981 && CONSTANT_P (XEXP (op1, 1)))
7983 rtx constant_term = const0_rtx;
7985 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7988 /* Ensure that MULT comes first if there is one. */
7989 else if (GET_CODE (op0) == MULT)
7990 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7992 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7994 /* Let's also eliminate constants from op0 if possible. */
7995 op0 = eliminate_constant_term (op0, &constant_term);
7997 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7998 their sum should be a constant. Form it into OP1, since the
7999 result we want will then be OP0 + OP1. */
8001 temp = simplify_binary_operation (PLUS, mode, constant_term,
8006 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
8009 /* Put a constant term last and put a multiplication first. */
8010 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8011 temp = op1, op1 = op0, op0 = temp;
8013 temp = simplify_binary_operation (PLUS, mode, op0, op1);
8014 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
8017 /* For initializers, we are allowed to return a MINUS of two
8018 symbolic constants. Here we handle all cases when both operands are constant. */
8020 /* Handle difference of two symbolic constants,
8021 for the sake of an initializer. */
8022 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8023 && really_constant_p (TREE_OPERAND (exp, 0))
8024 && really_constant_p (TREE_OPERAND (exp, 1)))
8026 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8028 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8031 /* If the last operand is a CONST_INT, use plus_constant of
8032 the negated constant. Else make the MINUS. */
8033 if (GET_CODE (op1) == CONST_INT)
8034 return plus_constant (op0, - INTVAL (op1));
8036 return gen_rtx_MINUS (mode, op0, op1);
8039 this_optab = ! unsignedp && flag_trapv
8040 && (GET_MODE_CLASS(mode) == MODE_INT)
8041 ? subv_optab : sub_optab;
8043 /* No sense saving up arithmetic to be done
8044 if it's all in the wrong mode to form part of an address.
8046 And force_operand won't know whether to sign-extend or zero-extend. */
8047 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8048 || mode != ptr_mode)
8051 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8054 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8055 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8057 /* Convert A - const to A + (-const). */
8058 if (GET_CODE (op1) == CONST_INT)
8060 op1 = negate_rtx (mode, op1);
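/* So, e.g., "x - 5" becomes "x + (-5)" and falls into the PLUS logic
   above, which already knows how to reassociate and fold constant
   terms.  */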
8067 /* If first operand is constant, swap them.
8068 Thus the following special case checks need only
8069 check the second operand. */
8070 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8072 tree t1 = TREE_OPERAND (exp, 0);
8073 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8074 TREE_OPERAND (exp, 1) = t1;
8077 /* Attempt to return something suitable for generating an
8078 indexed address, for machines that support that. */
8080 if (modifier == EXPAND_SUM && mode == ptr_mode
8081 && host_integerp (TREE_OPERAND (exp, 1), 0))
8083 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8086 /* If we knew for certain that this is arithmetic for an array
8087 reference, and we knew the bounds of the array, then we could
8088 apply the distributive law across (PLUS X C) for constant C.
8089 Without such knowledge, we risk overflowing the computation
8090 when both X and C are large, but X+C isn't. */
8091 /* ??? Could perhaps special-case EXP being unsigned and C being
8092 positive. In that case we are certain that X+C is no smaller
8093 than X and so the transformed expression will overflow iff the
8094 original would have. */
8096 if (GET_CODE (op0) != REG)
8097 op0 = force_operand (op0, NULL_RTX);
8098 if (GET_CODE (op0) != REG)
8099 op0 = copy_to_mode_reg (mode, op0);
8102 gen_rtx_MULT (mode, op0,
8103 GEN_INT (tree_low_cst (TREE_OPERAND (exp, 1), 0)));
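/* For illustration: under EXPAND_SUM this returns a bare
   (mult (reg) (const_int N)) instead of emitting a multiply insn, so
   the address arithmetic for something like "a[i]" with 4-byte
   elements can be folded into an indexed address such as
   (plus (mult (reg i) (const_int 4)) (symbol_ref a)).  */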
8106 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8109 /* Check for multiplying things that have been extended
8110 from a narrower type. If this machine supports multiplying
8111 in that narrower type with a result in the desired type,
8112 do it that way, and avoid the explicit type-conversion. */
8113 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8114 && TREE_CODE (type) == INTEGER_TYPE
8115 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8116 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8117 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8118 && int_fits_type_p (TREE_OPERAND (exp, 1),
8119 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8120 /* Don't use a widening multiply if a shift will do. */
8121 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
8122 > HOST_BITS_PER_WIDE_INT)
8123 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8125 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8126 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8128 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8129 /* If both operands are extended, they must either both
8130 be zero-extended or both be sign-extended. */
8131 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8133 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8135 enum machine_mode innermode
8136 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
8137 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8138 ? smul_widen_optab : umul_widen_optab);
8139 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8140 ? umul_widen_optab : smul_widen_optab);
8141 if (mode == GET_MODE_WIDER_MODE (innermode))
8143 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8145 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8146 NULL_RTX, VOIDmode, 0);
8147 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8148 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8151 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8152 NULL_RTX, VOIDmode, 0);
8155 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8156 && innermode == word_mode)
8159 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8160 NULL_RTX, VOIDmode, 0);
8161 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8162 op1 = convert_modes (innermode, mode,
8163 expand_expr (TREE_OPERAND (exp, 1),
8164 NULL_RTX, VOIDmode, 0),
8167 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8168 NULL_RTX, VOIDmode, 0);
8169 temp = expand_binop (mode, other_optab, op0, op1, target,
8170 unsignedp, OPTAB_LIB_WIDEN);
8171 htem = expand_mult_highpart_adjust (innermode,
8172 gen_highpart (innermode, temp),
8174 gen_highpart (innermode, temp),
8176 emit_move_insn (gen_highpart (innermode, temp), htem);
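/* For illustration, the two branches above implement a widening
   multiply.  On a 32-bit target,

     long long z = (long long) x * y;   with int x, y

   can use a 32x32->64 pattern of the matching signedness directly, or
   fall back to the opposite-signedness pattern and let
   expand_mult_highpart_adjust patch up the high word.  */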
8181 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8182 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8183 return expand_mult (mode, op0, op1, target, unsignedp);
8185 case TRUNC_DIV_EXPR:
8186 case FLOOR_DIV_EXPR:
8188 case ROUND_DIV_EXPR:
8189 case EXACT_DIV_EXPR:
8190 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8192 /* Possible optimization: compute the dividend with EXPAND_SUM;
8193 then, if the divisor is constant, we can optimize the case
8194 where some terms of the dividend have coefficients divisible by it. */
8195 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8196 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8197 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8200 /* Emit a/b as a*(1/b). Later we may manage to CSE the reciprocal, saving an
8201 expensive divide. If not, combine will rebuild the original computation. */
8203 if (flag_unsafe_math_optimizations && optimize && !optimize_size
8204 && TREE_CODE (type) == REAL_TYPE
8205 && !real_onep (TREE_OPERAND (exp, 0)))
8206 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8207 build (RDIV_EXPR, type,
8208 build_real (type, dconst1),
8209 TREE_OPERAND (exp, 1))),
8210 target, tmode, unsignedp);
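/* A sketch of the effect, assuming -funsafe-math-optimizations:

     double f (double a, double b, double d)
     { return a / d + b / d; }

   is rewritten as a * (1/d) + b * (1/d), so CSE may share the single
   reciprocal, trading two divides for one divide and two multiplies.
   If the reciprocal is used only once, combine rebuilds the divide.  */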
8211 this_optab = sdiv_optab;
8212 goto binop;
8214 case TRUNC_MOD_EXPR:
8215 case FLOOR_MOD_EXPR:
8216 case CEIL_MOD_EXPR:
8217 case ROUND_MOD_EXPR:
8218 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8219 subtarget = 0;
8220 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8221 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8222 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8224 case FIX_ROUND_EXPR:
8225 case FIX_FLOOR_EXPR:
8226 case FIX_CEIL_EXPR:
8227 abort (); /* Not used for C. */
8229 case FIX_TRUNC_EXPR:
8230 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8231 if (target == 0)
8232 target = gen_reg_rtx (mode);
8233 expand_fix (target, op0, unsignedp);
8234 return target;
8236 case FLOAT_EXPR:
8237 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8238 if (target == 0)
8239 target = gen_reg_rtx (mode);
8240 /* expand_float can't figure out what to do if FROM has VOIDmode.
8241 So give it the correct mode. With -O, cse will optimize this. */
8242 if (GET_MODE (op0) == VOIDmode)
8243 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8245 expand_float (target, op0,
8246 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8247 return target;
8249 case NEGATE_EXPR:
8250 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8251 temp = expand_unop (mode,
8252 ! unsignedp && flag_trapv
8253 && (GET_MODE_CLASS(mode) == MODE_INT)
8254 ? negv_optab : neg_optab, op0, target, 0);
8255 if (temp == 0)
8256 abort ();
8257 return temp;
8259 case ABS_EXPR:
8260 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8262 /* Handle complex values specially. */
8263 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8264 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8265 return expand_complex_abs (mode, op0, target, unsignedp);
8267 /* Unsigned abs is simply the operand. Testing here means we don't
8268 risk generating incorrect code below. */
8269 if (TREE_UNSIGNED (type))
8270 return op0;
8272 return expand_abs (mode, op0, target, unsignedp,
8273 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
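/* For reference, one common branchless expansion of signed abs on a
   two's-complement target with arithmetic right shifts (a sketch of
   the technique, not necessarily what expand_abs picks here):

     int iabs (int x)
     {
       int s = x >> 31;          s is 0 when x >= 0, -1 otherwise
       return (x ^ s) - s;       identity for s == 0, negation for -1
     }

   assuming a 32-bit int.  */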
8275 case MAX_EXPR:
8276 case MIN_EXPR:
8277 target = original_target;
8278 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
8279 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
8280 || GET_MODE (target) != mode
8281 || (GET_CODE (target) == REG
8282 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8283 target = gen_reg_rtx (mode);
8284 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8285 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8287 /* First try to do it with a special MIN or MAX instruction.
8288 If that does not win, use a conditional jump to select the proper
8289 value.  */
8290 this_optab = (TREE_UNSIGNED (type)
8291 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8292 : (code == MIN_EXPR ? smin_optab : smax_optab));
8294 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8295 OPTAB_WIDEN);
8296 if (temp != 0)
8297 return temp;
8299 /* At this point, a MEM target is no longer useful; we will get better
8300 code without it.  */
8302 if (GET_CODE (target) == MEM)
8303 target = gen_reg_rtx (mode);
8305 if (target != op0)
8306 emit_move_insn (target, op0);
8308 op0 = gen_label_rtx ();
8310 /* If this mode is an integer too wide to compare properly,
8311 compare word by word. Rely on cse to optimize constant cases. */
8312 if (GET_MODE_CLASS (mode) == MODE_INT
8313 && ! can_compare_p (GE, mode, ccp_jump))
8315 if (code == MAX_EXPR)
8316 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8317 target, op1, NULL_RTX, op0);
8319 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8320 op1, target, NULL_RTX, op0);
8321 }
8322 else
8323 {
8324 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8325 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
8326 unsignedp, mode, NULL_RTX, NULL_RTX,
8327 op0);
8328 }
8329 emit_move_insn (target, op1);
8330 emit_label (op0);
8331 return target;
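/* I.e., when no min/max instruction wins, MIN_EXPR (a, b) is emitted
   essentially as

     t = a; if (t <= b) goto done; t = b; done:

   copying one operand into the result and conditionally overwriting
   it after the compare-and-jump above.  */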
8333 case BIT_NOT_EXPR:
8334 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8335 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8336 if (temp == 0)
8337 abort ();
8338 return temp;
8340 case FFS_EXPR:
8341 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8342 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8343 if (temp == 0)
8344 abort ();
8345 return temp;
8347 /* ??? Can optimize bitwise operations with one arg constant.
8348 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8349 and (a bitwise1 b) bitwise2 b (etc)
8350 but that is probably not worth while. */
8352 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8353 boolean values when we want in all cases to compute both of them. In
8354 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8355 as actual zero-or-1 values and then bitwise anding. In cases where
8356 there cannot be any side effects, better code would be made by
8357 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8358 how to recognize those cases. */
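/* E.g., for "a && b" where neither operand has side effects, a front
   end may emit TRUTH_AND_EXPR: both a and b are evaluated to 0 or 1
   and the bits are ANDed, with no branch; TRUTH_ANDIF_EXPR would
   instead branch around the evaluation of b.  */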
8360 case TRUTH_AND_EXPR:
8361 case BIT_AND_EXPR:
8362 this_optab = and_optab;
8363 goto binop;
8365 case TRUTH_OR_EXPR:
8366 case BIT_IOR_EXPR:
8367 this_optab = ior_optab;
8368 goto binop;
8370 case TRUTH_XOR_EXPR:
8371 case BIT_XOR_EXPR:
8372 this_optab = xor_optab;
8373 goto binop;
8375 case LSHIFT_EXPR:
8376 case RSHIFT_EXPR:
8377 case LROTATE_EXPR:
8378 case RROTATE_EXPR:
8379 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8380 subtarget = 0;
8381 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8382 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8383 unsignedp);
8385 /* Could determine the answer when only additive constants differ. Also,
8386 the addition of one can be handled by changing the condition.  */
8387 case LT_EXPR:
8388 case LE_EXPR:
8389 case GT_EXPR:
8390 case GE_EXPR:
8391 case EQ_EXPR:
8392 case NE_EXPR:
8393 case UNORDERED_EXPR:
8394 case ORDERED_EXPR:
8395 case UNLT_EXPR:
8396 case UNLE_EXPR:
8397 case UNGT_EXPR:
8398 case UNGE_EXPR:
8399 case UNEQ_EXPR:
8400 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
8401 if (temp != 0)
8402 return temp;
8404 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
8405 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8406 && original_target != 0
8407 && GET_CODE (original_target) == REG
8408 && (GET_MODE (original_target)
8409 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8411 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8412 VOIDmode, 0);
8414 /* If temp is constant, we can just compute the result. */
8415 if (GET_CODE (temp) == CONST_INT)
8417 if (INTVAL (temp) != 0)
8418 emit_move_insn (target, const1_rtx);
8419 else
8420 emit_move_insn (target, const0_rtx);
8422 return target;
8425 if (temp != original_target)
8427 enum machine_mode mode1 = GET_MODE (temp);
8428 if (mode1 == VOIDmode)
8429 mode1 = tmode != VOIDmode ? tmode : mode;
8431 temp = copy_to_mode_reg (mode1, temp);
8434 op1 = gen_label_rtx ();
8435 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
8436 GET_MODE (temp), unsignedp, op1);
8437 emit_move_insn (temp, const1_rtx);
8438 emit_label (op1);
8439 return temp;
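/* The insns built above compute, in effect,

     temp = foo; if (temp == 0) goto L; temp = 1; L:

   so "foo != 0" costs one compare-and-jump instead of a general
   store-flag sequence.  */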
8442 /* If no set-flag instruction, must generate a conditional
8443 store into a temporary variable. Drop through
8444 and handle this like && and ||. */
8446 case TRUTH_ANDIF_EXPR:
8447 case TRUTH_ORIF_EXPR:
8448 if (! ignore
8449 && (target == 0 || ! safe_from_p (target, exp, 1)
8450 /* Make sure we don't have a hard reg (such as function's return
8451 value) live across basic blocks, if not optimizing. */
8452 || (!optimize && GET_CODE (target) == REG
8453 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
8454 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
8456 if (target)
8457 emit_clr_insn (target);
8459 op1 = gen_label_rtx ();
8460 jumpifnot (exp, op1);
8462 if (target)
8463 emit_0_to_1_insn (target);
8465 emit_label (op1);
8466 return ignore ? const0_rtx : target;
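/* I.e., without a usable store-flag insn, "t = (a || b);" becomes

     t = 0; if (!(a || b)) goto L; t = 1; L:

   with the || itself expanded as jumps by the jumpifnot above.  */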
8468 case TRUTH_NOT_EXPR:
8469 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8470 /* The parser is careful to generate TRUTH_NOT_EXPR
8471 only with operands that are always zero or one. */
8472 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
8473 target, 1, OPTAB_LIB_WIDEN);
8474 if (temp == 0)
8475 abort ();
8476 return temp;
8478 case COMPOUND_EXPR:
8479 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8480 emit_queue ();
8481 return expand_expr (TREE_OPERAND (exp, 1),
8482 (ignore ? const0_rtx : target),
8483 VOIDmode, modifier);
8485 case COND_EXPR:
8486 /* If we would have a "singleton" (see below) were it not for a
8487 conversion in each arm, bring that conversion back out. */
8488 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8489 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8490 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8491 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8493 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8494 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8496 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8497 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8498 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8499 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8500 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8501 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8502 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8503 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
8504 return expand_expr (build1 (NOP_EXPR, type,
8505 build (COND_EXPR, TREE_TYPE (iftrue),
8506 TREE_OPERAND (exp, 0),
8507 iftrue, iffalse)),
8508 target, tmode, modifier);
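/* E.g., "c ? (long) (x + 1) : (long) x" is rebuilt here as
   "(long) (c ? x + 1 : x)", so the singleton optimization below can
   see the operand x common to both arms.  */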
8512 /* Note that COND_EXPRs whose type is a structure or union
8513 are required to be constructed to contain assignments of
8514 a temporary variable, so that we can evaluate them here
8515 for side effect only. If type is void, we must do likewise. */
8517 /* If an arm of the branch requires a cleanup,
8518 only that cleanup is performed. */
8521 tree binary_op = 0, unary_op = 0;
8523 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8524 convert it to our mode, if necessary. */
8525 if (integer_onep (TREE_OPERAND (exp, 1))
8526 && integer_zerop (TREE_OPERAND (exp, 2))
8527 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8529 if (ignore)
8530 {
8531 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
8532 modifier);
8533 return const0_rtx;
8534 }
8536 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
8537 if (GET_MODE (op0) == mode)
8538 return op0;
8540 if (target == 0)
8541 target = gen_reg_rtx (mode);
8542 convert_move (target, op0, unsignedp);
8543 return target;
8544 }
8546 /* Check for X ? A + B : A. If we have this, we can copy A to the
8547 output and conditionally add B. Similarly for unary operations.
8548 Don't do this if X has side-effects because those side effects
8549 might affect A or B and the "?" operation is a sequence point in
8550 ANSI. (operand_equal_p tests for side effects.) */
8552 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8553 && operand_equal_p (TREE_OPERAND (exp, 2),
8554 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8555 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8556 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8557 && operand_equal_p (TREE_OPERAND (exp, 1),
8558 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8559 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8560 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8561 && operand_equal_p (TREE_OPERAND (exp, 2),
8562 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8563 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8564 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8565 && operand_equal_p (TREE_OPERAND (exp, 1),
8566 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8567 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8569 /* If we are not to produce a result, we have no target. Otherwise,
8570 if a target was specified use it; it will not be used as an
8571 intermediate target unless it is safe.  If no target, use a
8572 temporary.  */
8574 if (ignore)
8575 temp = 0;
8576 else if (original_target
8577 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
8578 || (singleton && GET_CODE (original_target) == REG
8579 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8580 && original_target == var_rtx (singleton)))
8581 && GET_MODE (original_target) == mode
8582 #ifdef HAVE_conditional_move
8583 && (! can_conditionally_move_p (mode)
8584 || GET_CODE (original_target) == REG
8585 || TREE_ADDRESSABLE (type))
8586 #endif
8587 && (GET_CODE (original_target) != MEM
8588 || TREE_ADDRESSABLE (type)))
8589 temp = original_target;
8590 else if (TREE_ADDRESSABLE (type))
8591 abort ();
8592 else
8593 temp = assign_temp (type, 0, 0, 1);
8595 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8596 do the test of X as a store-flag operation, do this as
8597 A + ((X != 0) << log C). Similarly for other simple binary
8598 operators. Only do for C == 1 if BRANCH_COST is low. */
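/* A sketch of the payoff: given a store-flag instruction,

     r = x ? a + 4 : a;

   becomes r = a + ((x != 0) << 2), with no branch at all.  */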
8599 if (temp && singleton && binary_op
8600 && (TREE_CODE (binary_op) == PLUS_EXPR
8601 || TREE_CODE (binary_op) == MINUS_EXPR
8602 || TREE_CODE (binary_op) == BIT_IOR_EXPR
8603 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
8604 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8605 : integer_onep (TREE_OPERAND (binary_op, 1)))
8606 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8610 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8611 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8612 ? addv_optab : add_optab)
8613 : TREE_CODE (binary_op) == MINUS_EXPR
8614 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8615 ? subv_optab : sub_optab)
8616 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8617 : xor_optab;
8619 /* If we had X ? A : A + 1, do this as A + (X == 0). */
8620 if (singleton == TREE_OPERAND (exp, 1))
8621 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8623 cond = TREE_OPERAND (exp, 0);
8625 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8626 ? temp : NULL_RTX),
8627 mode, BRANCH_COST <= 1);
8629 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8630 result = expand_shift (LSHIFT_EXPR, mode, result,
8631 build_int_2 (tree_log2
8632 (TREE_OPERAND
8633 (binary_op, 1)),
8634 0),
8635 (safe_from_p (temp, singleton, 1)
8636 ? temp : NULL_RTX), 0);
8638 if (result)
8639 {
8640 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
8641 return expand_binop (mode, boptab, op1, result, temp,
8642 unsignedp, OPTAB_LIB_WIDEN);
8646 do_pending_stack_adjust ();
8648 op0 = gen_label_rtx ();
8650 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8651 {
8652 if (temp != 0)
8653 {
8654 /* If the target conflicts with the other operand of the
8655 binary op, we can't use it. Also, we can't use the target
8656 if it is a hard register, because evaluating the condition
8657 might clobber it.  */
8658 if ((binary_op
8659 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
8660 || (GET_CODE (temp) == REG
8661 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8662 temp = gen_reg_rtx (mode);
8663 store_expr (singleton, temp, 0);
8664 }
8665 else
8666 expand_expr (singleton,
8667 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8668 if (singleton == TREE_OPERAND (exp, 1))
8669 jumpif (TREE_OPERAND (exp, 0), op0);
8670 else
8671 jumpifnot (TREE_OPERAND (exp, 0), op0);
8673 start_cleanup_deferral ();
8674 if (binary_op && temp == 0)
8675 /* Just touch the other operand. */
8676 expand_expr (TREE_OPERAND (binary_op, 1),
8677 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8678 else if (binary_op)
8679 store_expr (build (TREE_CODE (binary_op), type,
8680 make_tree (type, temp),
8681 TREE_OPERAND (binary_op, 1)),
8682 temp, 0);
8683 else
8684 store_expr (build1 (TREE_CODE (unary_op), type,
8685 make_tree (type, temp)),
8686 temp, 0);
8689 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8690 comparison operator. If we have one of these cases, set the
8691 output to A, branch on A (cse will merge these two references),
8692 then set the output to FOO. */
8693 else if (temp
8694 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8695 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8696 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8697 TREE_OPERAND (exp, 1), 0)
8698 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8699 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
8700 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
8702 if (GET_CODE (temp) == REG
8703 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8704 temp = gen_reg_rtx (mode);
8705 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8706 jumpif (TREE_OPERAND (exp, 0), op0);
8708 start_cleanup_deferral ();
8709 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8710 end_cleanup_deferral ();
8712 else if (temp
8713 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8714 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8715 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8716 TREE_OPERAND (exp, 2), 0)
8717 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8718 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
8719 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
8721 if (GET_CODE (temp) == REG
8722 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
8723 temp = gen_reg_rtx (mode);
8724 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8725 jumpifnot (TREE_OPERAND (exp, 0), op0);
8727 start_cleanup_deferral ();
8728 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8729 end_cleanup_deferral ();
8731 else
8732 {
8733 op1 = gen_label_rtx ();
8734 jumpifnot (TREE_OPERAND (exp, 0), op0);
8736 start_cleanup_deferral ();
8738 /* One branch of the cond can be void, if it never returns. For
8739 example A ? throw : E */
8740 if (temp != 0
8741 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8742 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8743 else
8744 expand_expr (TREE_OPERAND (exp, 1),
8745 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8746 end_cleanup_deferral ();
8748 emit_jump_insn (gen_jump (op1));
8749 emit_barrier ();
8750 emit_label (op0);
8751 start_cleanup_deferral ();
8752 if (temp != 0
8753 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8754 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8755 else
8756 expand_expr (TREE_OPERAND (exp, 2),
8757 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
8760 end_cleanup_deferral ();
8762 emit_queue ();
8763 emit_label (op1);
8764 OK_DEFER_POP;
8766 return temp;
8769 case TARGET_EXPR:
8771 /* Something needs to be initialized, but we didn't know
8772 where that thing was when building the tree. For example,
8773 it could be the return value of a function, or a parameter
8774 to a function which lays down in the stack, or a temporary
8775 variable which must be passed by reference.
8777 We guarantee that the expression will either be constructed
8778 or copied into our original target. */
8780 tree slot = TREE_OPERAND (exp, 0);
8781 tree cleanups = NULL_TREE;
8784 if (TREE_CODE (slot) != VAR_DECL)
8785 abort ();
8787 if (! ignore)
8788 target = original_target;
8790 /* Set this here so that if we get a target that refers to a
8791 register variable that's already been used, put_reg_into_stack
8792 knows that it should fix up those uses. */
8793 TREE_USED (slot) = 1;
8795 if (target == 0)
8796 {
8797 if (DECL_RTL_SET_P (slot))
8799 target = DECL_RTL (slot);
8800 /* If we have already expanded the slot, don't do
8801 it again.  */
8802 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8803 return target;
8805 else
8807 target = assign_temp (type, 2, 0, 1);
8808 /* All temp slots at this level must not conflict. */
8809 preserve_temp_slots (target);
8810 SET_DECL_RTL (slot, target);
8811 if (TREE_ADDRESSABLE (slot))
8812 put_var_into_stack (slot);
8814 /* Since SLOT is not known to the called function
8815 to belong to its stack frame, we must build an explicit
8816 cleanup. This case occurs when we must build up a reference
8817 to pass the reference as an argument. In this case,
8818 it is very likely that such a reference need not be
8819 built here.  */
8821 if (TREE_OPERAND (exp, 2) == 0)
8822 TREE_OPERAND (exp, 2)
8823 = (*lang_hooks.maybe_build_cleanup) (slot);
8824 cleanups = TREE_OPERAND (exp, 2);
8829 /* This case does occur, when expanding a parameter which
8830 needs to be constructed on the stack. The target
8831 is the actual stack address that we want to initialize.
8832 The function we call will perform the cleanup in this case. */
8834 /* If we have already assigned it space, use that space,
8835 not target that we were passed in, as our target
8836 parameter is only a hint. */
8837 if (DECL_RTL_SET_P (slot))
8839 target = DECL_RTL (slot);
8840 /* If we have already expanded the slot, don't do
8841 it again.  */
8842 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8843 return target;
8845 else
8847 SET_DECL_RTL (slot, target);
8848 /* If we must have an addressable slot, then make sure that
8849 the RTL that we just stored in slot is OK. */
8850 if (TREE_ADDRESSABLE (slot))
8851 put_var_into_stack (slot);
8855 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8856 /* Mark it as expanded. */
8857 TREE_OPERAND (exp, 1) = NULL_TREE;
8859 store_expr (exp1, target, 0);
8861 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
8863 return target;
8866 case INIT_EXPR:
8868 tree lhs = TREE_OPERAND (exp, 0);
8869 tree rhs = TREE_OPERAND (exp, 1);
8871 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8872 return temp;
8875 case MODIFY_EXPR:
8877 /* If lhs is complex, expand calls in rhs before computing it.
8878 That's so we don't compute a pointer and save it over a
8879 call. If lhs is simple, compute it first so we can give it
8880 as a target if the rhs is just a call. This avoids an
8881 extra temp and copy and that prevents a partial-subsumption
8882 which makes bad code. Actually we could treat
8883 component_ref's of vars like vars. */
8885 tree lhs = TREE_OPERAND (exp, 0);
8886 tree rhs = TREE_OPERAND (exp, 1);
8890 /* Check for |= or &= of a bitfield of size one into another bitfield
8891 of size 1. In this case, (unless we need the result of the
8892 assignment) we can do this more efficiently with a
8893 test followed by an assignment, if necessary.
8895 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8896 things change so we do, this code should be enhanced to
8897 support it.  */
8898 if (ignore
8899 && TREE_CODE (lhs) == COMPONENT_REF
8900 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8901 || TREE_CODE (rhs) == BIT_AND_EXPR)
8902 && TREE_OPERAND (rhs, 0) == lhs
8903 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8904 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8905 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8907 rtx label = gen_label_rtx ();
8909 do_jump (TREE_OPERAND (rhs, 1),
8910 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8911 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8912 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8913 (TREE_CODE (rhs) == BIT_IOR_EXPR
8914 ? integer_one_node
8915 : integer_zero_node)),
8916 0, 0);
8917 do_pending_stack_adjust ();
8918 emit_label (label);
8919 return const0_rtx;
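/* I.e., with one-bit fields, "a.b |= c.d;" is emitted as

     if (c.d) a.b = 1;

   and "a.b &= c.d;" as "if (!c.d) a.b = 0;": one test and one
   conditional store instead of load, or/and, and store-back.  */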
8922 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8923 return temp;
8926 case RETURN_EXPR:
8928 if (!TREE_OPERAND (exp, 0))
8929 expand_null_return ();
8930 else
8931 expand_return (TREE_OPERAND (exp, 0));
8932 return const0_rtx;
8934 case PREINCREMENT_EXPR:
8935 case PREDECREMENT_EXPR:
8936 return expand_increment (exp, 0, ignore);
8938 case POSTINCREMENT_EXPR:
8939 case POSTDECREMENT_EXPR:
8940 /* Faster to treat as pre-increment if result is not used. */
8941 return expand_increment (exp, ! ignore, ignore);
8944 /* Are we taking the address of a nested function? */
8945 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
8946 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8947 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8948 && ! TREE_STATIC (exp))
8950 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8951 op0 = force_operand (op0, target);
8953 /* If we are taking the address of something erroneous, just
8954 use zero.  */
8955 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8956 return const0_rtx;
8957 /* If we are taking the address of a constant and are at the
8958 top level, we have to use output_constant_def since we can't
8959 call force_const_mem at top level. */
8960 else if (cfun == 0
8961 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8962 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
8963 == 'c')))
8964 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
8965 else
8967 /* We make sure to pass const0_rtx down if we came in with
8968 ignore set, to avoid doing the cleanups twice for something. */
8969 op0 = expand_expr (TREE_OPERAND (exp, 0),
8970 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8971 (modifier == EXPAND_INITIALIZER
8972 ? modifier : EXPAND_CONST_ADDRESS));
8974 /* If we are going to ignore the result, OP0 will have been set
8975 to const0_rtx, so just return it. Don't get confused and
8976 think we are taking the address of the constant.  */
8978 if (ignore)
8979 return op0;
8980 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
8981 clever and returns a REG when given a MEM. */
8982 op0 = protect_from_queue (op0, 1);
8984 /* We would like the object in memory. If it is a constant, we can
8985 have it be statically allocated into memory. For a non-constant,
8986 we need to allocate some memory and store the value into it. */
8988 if (CONSTANT_P (op0))
8989 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8990 op0);
8991 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8992 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8993 || GET_CODE (op0) == PARALLEL)
8995 /* If the operand is a SAVE_EXPR, we can deal with this by
8996 forcing the SAVE_EXPR into memory. */
8997 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
8999 put_var_into_stack (TREE_OPERAND (exp, 0));
9000 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9001 }
9002 else
9003 {
9004 /* If this object is in a register, it can't be BLKmode.  */
9005 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9006 rtx memloc = assign_temp (inner_type, 1, 1, 1);
9008 if (GET_CODE (op0) == PARALLEL)
9009 /* Handle calls that pass values in multiple
9010 non-contiguous locations.  The Irix 6 ABI has examples
9011 of this.  */
9012 emit_group_store (memloc, op0,
9013 int_size_in_bytes (inner_type));
9014 else
9015 emit_move_insn (memloc, op0);
9017 op0 = memloc;
9021 if (GET_CODE (op0) != MEM)
9022 abort ();
9024 mark_temp_addr_taken (op0);
9025 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
9027 op0 = XEXP (op0, 0);
9028 #ifdef POINTERS_EXTEND_UNSIGNED
9029 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9030 && mode == ptr_mode)
9031 op0 = convert_memory_address (ptr_mode, op0);
9032 #endif
9033 return op0;
9036 /* If OP0 is not aligned as least as much as the type requires, we
9037 need to make a temporary, copy OP0 to it, and take the address of
9038 the temporary. We want to use the alignment of the type, not of
9039 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9040 the test for BLKmode means that can't happen. The test for
9041 BLKmode is because we never make mis-aligned MEMs with
9042 non-BLKmode.
9044 We don't need to do this at all if the machine doesn't have
9045 strict alignment. */
9046 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9047 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
9048 > MEM_ALIGN (op0))
9049 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
9051 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9052 rtx new
9053 = assign_stack_temp_for_type
9054 (TYPE_MODE (inner_type),
9055 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
9056 : int_size_in_bytes (inner_type),
9057 1, build_qualified_type (inner_type,
9058 (TYPE_QUALS (inner_type)
9059 | TYPE_QUAL_CONST)));
9061 if (TYPE_ALIGN_OK (inner_type))
9062 abort ();
9064 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
9065 BLOCK_OP_NORMAL);
9066 op0 = new;
9069 op0 = force_operand (XEXP (op0, 0), target);
9072 if (flag_force_addr
9073 && GET_CODE (op0) != REG
9074 && modifier != EXPAND_CONST_ADDRESS
9075 && modifier != EXPAND_INITIALIZER
9076 && modifier != EXPAND_SUM)
9077 op0 = force_reg (Pmode, op0);
9079 if (GET_CODE (op0) == REG
9080 && ! REG_USERVAR_P (op0))
9081 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
9083 #ifdef POINTERS_EXTEND_UNSIGNED
9084 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9085 && mode == ptr_mode)
9086 op0 = convert_memory_address (ptr_mode, op0);
9091 case ENTRY_VALUE_EXPR:
9094 /* COMPLEX type for Extended Pascal & Fortran */
9095 case COMPLEX_EXPR:
9097 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9098 rtx insns;
9100 /* Get the rtx code of the operands. */
9101 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9102 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9104 if (! target)
9105 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9107 start_sequence ();
9109 /* Move the real (op0) and imaginary (op1) parts to their location. */
9110 emit_move_insn (gen_realpart (mode, target), op0);
9111 emit_move_insn (gen_imagpart (mode, target), op1);
9113 insns = get_insns ();
9114 end_sequence ();
9116 /* Complex construction should appear as a single unit. */
9117 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9118 each with a separate pseudo as destination.
9119 It's not correct for flow to treat them as a unit. */
9120 if (GET_CODE (target) != CONCAT)
9121 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9122 else
9123 emit_insn (insns);
9125 return target;
9128 case REALPART_EXPR:
9129 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9130 return gen_realpart (mode, op0);
9132 case IMAGPART_EXPR:
9133 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9134 return gen_imagpart (mode, op0);
9136 case CONJ_EXPR:
9138 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
9142 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9144 if (! target)
9145 target = gen_reg_rtx (mode);
9147 start_sequence ();
9149 /* Store the realpart and the negated imagpart to target. */
9150 emit_move_insn (gen_realpart (partmode, target),
9151 gen_realpart (partmode, op0));
9153 imag_t = gen_imagpart (partmode, target);
9154 temp = expand_unop (partmode,
9155 ! unsignedp && flag_trapv
9156 && (GET_MODE_CLASS(partmode) == MODE_INT)
9157 ? negv_optab : neg_optab,
9158 gen_imagpart (partmode, op0), imag_t, 0);
9159 if (temp != imag_t)
9160 emit_move_insn (imag_t, temp);
9162 insns = get_insns ();
9163 end_sequence ();
9165 /* Conjugate should appear as a single unit
9166 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
9167 each with a separate pseudo as destination.
9168 It's not correct for flow to treat them as a unit. */
9169 if (GET_CODE (target) != CONCAT)
9170 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9171 else
9172 emit_insn (insns);
9174 return target;
9177 case TRY_CATCH_EXPR:
9179 tree handler = TREE_OPERAND (exp, 1);
9181 expand_eh_region_start ();
9183 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9185 expand_eh_region_end_cleanup (handler);
9187 return op0;
9190 case TRY_FINALLY_EXPR:
9192 tree try_block = TREE_OPERAND (exp, 0);
9193 tree finally_block = TREE_OPERAND (exp, 1);
9195 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9197 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9198 is not sufficient, so we cannot expand the block twice.
9199 So we play games with GOTO_SUBROUTINE_EXPR to let us
9200 expand the thing only once. */
9201 /* When not optimizing, we go ahead with this form since
9202 (1) user breakpoints operate more predictably without
9203 code duplication, and
9204 (2) we're not running any of the global optimizers
9205 that would explode in time/space with the highly
9206 connected CFG created by the indirect branching. */
9208 rtx finally_label = gen_label_rtx ();
9209 rtx done_label = gen_label_rtx ();
9210 rtx return_link = gen_reg_rtx (Pmode);
9211 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9212 (tree) finally_label, (tree) return_link);
9213 TREE_SIDE_EFFECTS (cleanup) = 1;
9215 /* Start a new binding layer that will keep track of all cleanup
9216 actions to be performed. */
9217 expand_start_bindings (2);
9218 target_temp_slot_level = temp_slot_level;
9220 expand_decl_cleanup (NULL_TREE, cleanup);
9221 op0 = expand_expr (try_block, target, tmode, modifier);
9223 preserve_temp_slots (op0);
9224 expand_end_bindings (NULL_TREE, 0, 0);
9225 emit_jump (done_label);
9226 emit_label (finally_label);
9227 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9228 emit_indirect_jump (return_link);
9229 emit_label (done_label);
9231 else
9232 {
9233 expand_start_bindings (2);
9234 target_temp_slot_level = temp_slot_level;
9236 expand_decl_cleanup (NULL_TREE, finally_block);
9237 op0 = expand_expr (try_block, target, tmode, modifier);
9239 preserve_temp_slots (op0);
9240 expand_end_bindings (NULL_TREE, 0, 0);
9243 return op0;
9246 case GOTO_SUBROUTINE_EXPR:
9248 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9249 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9250 rtx return_address = gen_label_rtx ();
9251 emit_move_insn (return_link,
9252 gen_rtx_LABEL_REF (Pmode, return_address));
9253 emit_jump (subr);
9254 emit_label (return_address);
9255 return const0_rtx;
9258 case VA_ARG_EXPR:
9259 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9261 case EXC_PTR_EXPR:
9262 return get_exception_pointer (cfun);
9264 case FDESC_EXPR:
9265 /* Function descriptors are not valid except for as
9266 initialization constants, and should not be expanded.  */
9267 abort ();
9269 default:
9270 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9271 }
9273 /* Here to do an ordinary binary operator, generating an instruction
9274 from the optab already placed in `this_optab'.  */
9275 binop:
9276 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9277 subtarget = 0;
9278 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
9279 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9280 binop2:
9281 temp = expand_binop (mode, this_optab, op0, op1, target,
9282 unsignedp, OPTAB_LIB_WIDEN);
9283 if (temp == 0)
9284 abort ();
9285 return temp;
9286 }
9288 /* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9289 when applied to the address of EXP produces an address known to be
9290 aligned more than BIGGEST_ALIGNMENT. */
9292 static int
9293 is_aligning_offset (offset, exp)
9294 tree offset;
9295 tree exp;
9296 {
9297 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9298 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9299 || TREE_CODE (offset) == NOP_EXPR
9300 || TREE_CODE (offset) == CONVERT_EXPR
9301 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9302 offset = TREE_OPERAND (offset, 0);
9304 /* We must now have a BIT_AND_EXPR with a constant that is one less than
9305 a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9306 if (TREE_CODE (offset) != BIT_AND_EXPR
9307 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9308 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
9309 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9310 return 0;
9312 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9313 It must be NEGATE_EXPR. Then strip any more conversions. */
9314 offset = TREE_OPERAND (offset, 0);
9315 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9316 || TREE_CODE (offset) == NOP_EXPR
9317 || TREE_CODE (offset) == CONVERT_EXPR)
9318 offset = TREE_OPERAND (offset, 0);
9320 if (TREE_CODE (offset) != NEGATE_EXPR)
9321 return 0;
9323 offset = TREE_OPERAND (offset, 0);
9324 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9325 || TREE_CODE (offset) == NOP_EXPR
9326 || TREE_CODE (offset) == CONVERT_EXPR)
9327 offset = TREE_OPERAND (offset, 0);
9329 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9330 whose type is the same as EXP. */
9331 return (TREE_CODE (offset) == ADDR_EXPR
9332 && (TREE_OPERAND (offset, 0) == exp
9333 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9334 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9335 == TREE_TYPE (exp)))));
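/* The shape recognized above corresponds to a hand-rolled alignment
   idiom such as (assuming ALIGN is a power of 2 stricter than
   BIGGEST_ALIGNMENT):

     char buf[SIZE + ALIGN];
     char *p = buf + (-(long) buf & (ALIGN - 1));

   i.e. a BIT_AND_EXPR of a NEGATE_EXPR of the object's own address,
   which rounds the address up to the next ALIGN boundary.  */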
9338 /* Return the tree node if ARG corresponds to a string constant, or zero
9339 if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9340 in bytes within the string that ARG is accessing. The type of the
9341 offset will be `sizetype'. */
9343 tree
9344 string_constant (arg, ptr_offset)
9345 tree arg;
9346 tree *ptr_offset;
9347 {
9348 STRIP_NOPS (arg);
9350 if (TREE_CODE (arg) == ADDR_EXPR
9351 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9353 *ptr_offset = size_zero_node;
9354 return TREE_OPERAND (arg, 0);
9356 else if (TREE_CODE (arg) == PLUS_EXPR)
9358 tree arg0 = TREE_OPERAND (arg, 0);
9359 tree arg1 = TREE_OPERAND (arg, 1);
9361 STRIP_NOPS (arg0);
9362 STRIP_NOPS (arg1);
9364 if (TREE_CODE (arg0) == ADDR_EXPR
9365 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
9367 *ptr_offset = convert (sizetype, arg1);
9368 return TREE_OPERAND (arg0, 0);
9370 else if (TREE_CODE (arg1) == ADDR_EXPR
9371 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
9373 *ptr_offset = convert (sizetype, arg0);
9374 return TREE_OPERAND (arg1, 0);
9378 return 0;
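/* E.g., for strlen ("hello" + 2) the PLUS_EXPR arm above returns the
   STRING_CST "hello" with *PTR_OFFSET set to 2, letting the caller
   fold the call to 3.  */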
9381 /* Expand code for a post- or pre- increment or decrement
9382 and return the RTX for the result.
9383 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
9385 static rtx
9386 expand_increment (exp, post, ignore)
9387 tree exp;
9388 int post, ignore;
9389 {
9390 rtx op0, op1;
9391 rtx temp, value;
9392 tree incremented = TREE_OPERAND (exp, 0);
9393 optab this_optab = add_optab;
9395 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9396 int op0_is_copy = 0;
9397 int single_insn = 0;
9398 /* 1 means we can't store into OP0 directly,
9399 because it is a subreg narrower than a word,
9400 and we don't dare clobber the rest of the word.  */
9401 int bad_subreg = 0;
9403 /* Stabilize any component ref that might need to be
9404 evaluated more than once below. */
9405 if (!post
9406 || TREE_CODE (incremented) == BIT_FIELD_REF
9407 || (TREE_CODE (incremented) == COMPONENT_REF
9408 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9409 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9410 incremented = stabilize_reference (incremented);
9411 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9412 ones into save exprs so that they don't accidentally get evaluated
9413 more than once by the code below. */
9414 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9415 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9416 incremented = save_expr (incremented);
9418 /* Compute the operands as RTX.
9419 Note whether OP0 is the actual lvalue or a copy of it:
9420 I believe it is a copy iff it is a register or subreg
9421 and insns were generated in computing it. */
9423 temp = get_last_insn ();
9424 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
9426 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9427 in place but instead must do sign- or zero-extension during assignment,
9428 so we copy it into a new register and let the code below use it as
9429 a copy.
9431 Note that we can safely modify this SUBREG since it is known not to be
9432 shared (it was made by the expand_expr call above).  */
9434 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9436 if (post)
9437 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9438 else
9439 bad_subreg = 1;
9441 else if (GET_CODE (op0) == SUBREG
9442 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9444 /* We cannot increment this SUBREG in place. If we are
9445 post-incrementing, get a copy of the old value. Otherwise,
9446 just mark that we cannot increment in place. */
9447 if (post)
9448 op0 = copy_to_reg (op0);
9449 else
9450 bad_subreg = 1;
9453 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9454 && temp != get_last_insn ());
9455 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9457 /* Decide whether incrementing or decrementing. */
9458 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9459 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9460 this_optab = sub_optab;
9462 /* Convert decrement by a constant into a negative increment. */
9463 if (this_optab == sub_optab
9464 && GET_CODE (op1) == CONST_INT)
9466 op1 = GEN_INT (-INTVAL (op1));
9467 this_optab = add_optab;
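/* I.e., "x -= 4" is handled from here on as "x += -4", so only the
   add patterns need to cope with constant increments.  */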
9470 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9471 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9473 /* For a preincrement, see if we can do this with a single instruction. */
9474 if (!post)
9475 {
9476 icode = (int) this_optab->handlers[(int) mode].insn_code;
9477 if (icode != (int) CODE_FOR_nothing
9478 /* Make sure that OP0 is valid for operands 0 and 1
9479 of the insn we want to queue. */
9480 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9481 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9482 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9483 single_insn = 1;
9484 }
9486 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9487 then we cannot just increment OP0. We must therefore contrive to
9488 increment the original value. Then, for postincrement, we can return
9489 OP0 since it is a copy of the old value. For preincrement, expand here
9490 unless we can do it with a single insn.
9492 Likewise if storing directly into OP0 would clobber high bits
9493 we need to preserve (bad_subreg). */
9494 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
9496 /* This is the easiest way to increment the value wherever it is.
9497 Problems with multiple evaluation of INCREMENTED are prevented
9498 because either (1) it is a component_ref or preincrement,
9499 in which case it was stabilized above, or (2) it is an array_ref
9500 with constant index in an array in a register, which is
9501 safe to reevaluate. */
9502 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9503 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9504 ? MINUS_EXPR : PLUS_EXPR),
9505 TREE_TYPE (exp),
9506 incremented,
9507 TREE_OPERAND (exp, 1));
9509 while (TREE_CODE (incremented) == NOP_EXPR
9510 || TREE_CODE (incremented) == CONVERT_EXPR)
9512 newexp = convert (TREE_TYPE (incremented), newexp);
9513 incremented = TREE_OPERAND (incremented, 0);
9516 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9517 return post ? op0 : temp;
9522 /* We have a true reference to the value in OP0.
9523 If there is an insn to add or subtract in this mode, queue it.
9524 Queueing the increment insn avoids the register shuffling
9525 that often results if we must increment now and first save
9526 the old value for subsequent use. */
9528 #if 0 /* Turned off to avoid making extra insn for indexed memref. */
9529 op0 = stabilize (op0);
9530 #endif
9532 icode = (int) this_optab->handlers[(int) mode].insn_code;
9533 if (icode != (int) CODE_FOR_nothing
9534 /* Make sure that OP0 is valid for operands 0 and 1
9535 of the insn we want to queue. */
9536 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9537 && (*insn_data[icode].operand[1].predicate) (op0, mode))
9539 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9540 op1 = force_reg (mode, op1);
9542 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9544 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9546 rtx addr = (general_operand (XEXP (op0, 0), mode)
9547 ? force_reg (Pmode, XEXP (op0, 0))
9548 : copy_to_reg (XEXP (op0, 0)));
9550 rtx temp, result;
9551 op0 = replace_equiv_address (op0, addr);
9552 temp = force_reg (GET_MODE (op0), op0);
9553 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
9554 op1 = force_reg (mode, op1);
9556 /* The increment queue is LIFO, thus we have to `queue'
9557 the instructions in reverse order. */
9558 enqueue_insn (op0, gen_move_insn (op0, temp));
9559 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9561 return result;
9564 /* Preincrement, or we can't increment with one simple insn. */
9565 if (post)
9566 /* Save a copy of the value before inc or dec, to return it later.  */
9567 temp = value = copy_to_reg (op0);
9568 else
9569 /* Arrange to return the incremented value.  */
9570 /* Copy the rtx because expand_binop will protect from the queue,
9571 and the results of that would be invalid for us to return
9572 if our caller does emit_queue before using our result. */
9573 temp = copy_rtx (value = op0);
9575 /* Increment however we can. */
9576 op1 = expand_binop (mode, this_optab, value, op1, op0,
9577 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9579 /* Make sure the value is stored into OP0.  */
9580 if (op1 != op0)
9581 emit_move_insn (op0, op1);
9583 return temp;
9586 /* At the start of a function, record that we have no previously-pushed
9587 arguments waiting to be popped. */
9589 void
9590 init_pending_stack_adjust ()
9591 {
9592 pending_stack_adjust = 0;
9593 }
9595 /* When exiting from function, if safe, clear out any pending stack adjust
9596 so the adjustment won't get done.
9598 Note, if the current function calls alloca, then it must have a
9599 frame pointer regardless of the value of flag_omit_frame_pointer. */
9601 void
9602 clear_pending_stack_adjust ()
9603 {
9604 #ifdef EXIT_IGNORE_STACK
9605 if (optimize > 0
9606 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9607 && EXIT_IGNORE_STACK
9608 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9609 && ! flag_inline_functions)
9611 stack_pointer_delta -= pending_stack_adjust,
9612 pending_stack_adjust = 0;
9614 #endif
9615 }
9617 /* Pop any previously-pushed arguments that have not been popped yet. */
9619 void
9620 do_pending_stack_adjust ()
9621 {
9622 if (inhibit_defer_pop == 0)
9624 if (pending_stack_adjust != 0)
9625 adjust_stack (GEN_INT (pending_stack_adjust));
9626 pending_stack_adjust = 0;
9630 /* Expand conditional expressions. */
9632 /* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9633 LABEL is an rtx of code CODE_LABEL, in this function and all the
9634 functions here.  */
9636 void
9637 jumpifnot (exp, label)
9638 tree exp;
9639 rtx label;
9640 {
9641 do_jump (exp, label, NULL_RTX);
9642 }
9644 /* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */
9646 void
9647 jumpif (exp, label)
9648 tree exp;
9649 rtx label;
9650 {
9651 do_jump (exp, NULL_RTX, label);
9652 }
9654 /* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9655 the result is zero, or IF_TRUE_LABEL if the result is one.
9656 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9657 meaning fall through in that case.
9659 do_jump always does any pending stack adjust except when it does not
9660 actually perform a jump. An example where there is no jump
9661 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
9663 This function is responsible for optimizing cases such as
9664 &&, || and comparison operators in EXP. */
9666 void
9667 do_jump (exp, if_false_label, if_true_label)
9668 tree exp;
9669 rtx if_false_label, if_true_label;
9670 {
9671 enum tree_code code = TREE_CODE (exp);
9672 /* Some cases need to create a label to jump to
9673 in order to properly fall through.
9674 These cases set DROP_THROUGH_LABEL nonzero. */
9675 rtx drop_through_label = 0;
9676 rtx temp;
9677 int i;
9678 tree type;
9679 enum machine_mode mode;
9681 #ifdef MAX_INTEGER_COMPUTATION_MODE
9682 check_max_integer_computation_mode (exp);
9683 #endif
9685 emit_queue ();
9687 switch (code)
9688 {
9689 case ERROR_MARK:
9690 break;
9692 case INTEGER_CST:
9693 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9694 if (temp)
9695 emit_jump (temp);
9696 break;
9698 #if 0
9699 /* This is not true with #pragma weak  */
9700 case ADDR_EXPR:
9701 /* The address of something can never be zero.  */
9702 if (if_true_label)
9703 emit_jump (if_true_label);
9704 break;
9705 #endif
9707 case NOP_EXPR:
9708 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9709 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9710 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF
9711 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_RANGE_REF)
9712 goto normal;
9713 case CONVERT_EXPR:
9714 /* If we are narrowing the operand, we have to do the compare in the
9715 narrower mode.  */
9716 if ((TYPE_PRECISION (TREE_TYPE (exp))
9717 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9718 goto normal;
9719 case NON_LVALUE_EXPR:
9720 case REFERENCE_EXPR:
9721 case ABS_EXPR:
9722 case NEGATE_EXPR:
9723 case LROTATE_EXPR:
9724 case RROTATE_EXPR:
9725 /* These cannot change zero->nonzero or vice versa.  */
9726 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9727 break;
9729 case WITH_RECORD_EXPR:
9730 /* Put the object on the placeholder list, recurse through our first
9731 operand, and pop the list. */
9732 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9733 placeholder_list);
9734 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9735 placeholder_list = TREE_CHAIN (placeholder_list);
9736 break;
9738 #if 0
9739 /* This is never less insns than evaluating the PLUS_EXPR followed by
9740 a test and can be longer if the test is eliminated.  */
9741 case PLUS_EXPR:
9740 a test and can be longer if the test is eliminated. */
9742 /* Reduce to minus. */
9743 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9744 TREE_OPERAND (exp, 0),
9745 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9746 TREE_OPERAND (exp, 1))));
9747 /* Process as MINUS.  */
9748 #endif
9750 case MINUS_EXPR:
9751 /* Nonzero iff operands of minus differ.  */
9752 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9753 TREE_OPERAND (exp, 0),
9754 TREE_OPERAND (exp, 1)),
9755 NE, NE, if_false_label, if_true_label);
9756 break;
9758 case BIT_AND_EXPR:
9759 /* If we are AND'ing with a small constant, do this comparison in the
9760 smallest type that fits. If the machine doesn't have comparisons
9761 that small, it will be converted back to the wider comparison.
9762 This helps if we are testing the sign bit of a narrower object.
9763 combine can't do this for us because it can't know whether a
9764 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
9766 if (! SLOW_BYTE_ACCESS
9767 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9768 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9769 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
9770 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9771 && (type = (*lang_hooks.types.type_for_mode) (mode, 1)) != 0
9772 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9773 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9774 != CODE_FOR_nothing))
9776 do_jump (convert (type, exp), if_false_label, if_true_label);
9777 break;
9779 goto normal;
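/* E.g., "if (x & 0x80)" with a 32-bit x only needs bits 0-7, so the
   test can be done as a QImode compare of the low byte, which also
   makes it recognizable as a sign-bit test of that byte.  */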
9781 case TRUTH_NOT_EXPR:
9782 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9783 break;
9785 case TRUTH_ANDIF_EXPR:
9786 if (if_false_label == 0)
9787 if_false_label = drop_through_label = gen_label_rtx ();
9788 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9789 start_cleanup_deferral ();
9790 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9791 end_cleanup_deferral ();
9792 break;
9794 case TRUTH_ORIF_EXPR:
9795 if (if_true_label == 0)
9796 if_true_label = drop_through_label = gen_label_rtx ();
9797 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9798 start_cleanup_deferral ();
9799 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9800 end_cleanup_deferral ();
9801 break;
9803 case COMPOUND_EXPR:
9804 push_temp_slots ();
9805 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9806 preserve_temp_slots (NULL_RTX);
9807 free_temp_slots ();
9808 pop_temp_slots ();
9809 emit_queue ();
9810 do_pending_stack_adjust ();
9811 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9812 break;
9814 case COMPONENT_REF:
9815 case BIT_FIELD_REF:
9816 case ARRAY_REF:
9817 case ARRAY_RANGE_REF:
9819 HOST_WIDE_INT bitsize, bitpos;
9821 enum machine_mode mode;
9822 tree type;
9823 tree offset;
9824 int volatilep = 0;
9826 /* Get description of this reference. We don't actually care
9827 about the underlying object here. */
9828 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9829 &unsignedp, &volatilep);
9831 type = (*lang_hooks.types.type_for_size) (bitsize, unsignedp);
9832 if (! SLOW_BYTE_ACCESS
9833 && type != 0 && bitsize >= 0
9834 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9835 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9836 != CODE_FOR_nothing))
9838 do_jump (convert (type, exp), if_false_label, if_true_label);
9839 break;
9841 goto normal;
9844 case COND_EXPR:
9845 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
9846 if (integer_onep (TREE_OPERAND (exp, 1))
9847 && integer_zerop (TREE_OPERAND (exp, 2)))
9848 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9850 else if (integer_zerop (TREE_OPERAND (exp, 1))
9851 && integer_onep (TREE_OPERAND (exp, 2)))
9852 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9854 else
9855 {
9856 rtx label1 = gen_label_rtx ();
9857 drop_through_label = gen_label_rtx ();
9859 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
9861 start_cleanup_deferral ();
9862 /* Now the THEN-expression. */
9863 do_jump (TREE_OPERAND (exp, 1),
9864 if_false_label ? if_false_label : drop_through_label,
9865 if_true_label ? if_true_label : drop_through_label);
9866 /* In case the do_jump just above never jumps. */
9867 do_pending_stack_adjust ();
9868 emit_label (label1);
9870 /* Now the ELSE-expression. */
9871 do_jump (TREE_OPERAND (exp, 2),
9872 if_false_label ? if_false_label : drop_through_label,
9873 if_true_label ? if_true_label : drop_through_label);
9874 end_cleanup_deferral ();
9875 break;
9878 case EQ_EXPR:
9880 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9882 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9883 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9885 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9886 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9888 do_jump
9889 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9890 fold (build (EQ_EXPR, TREE_TYPE (exp),
9891 fold (build1 (REALPART_EXPR,
9892 TREE_TYPE (inner_type),
9893 exp0)),
9894 fold (build1 (REALPART_EXPR,
9895 TREE_TYPE (inner_type),
9896 exp1))),
9897 fold (build (EQ_EXPR, TREE_TYPE (exp),
9898 fold (build1 (IMAGPART_EXPR,
9899 TREE_TYPE (inner_type),
9900 exp0)),
9901 fold (build1 (IMAGPART_EXPR,
9902 TREE_TYPE (inner_type),
9903 exp1)))),
9904 if_false_label, if_true_label);
9907 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9908 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9910 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9911 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9912 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9913 else
9914 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9915 break;
9918 case NE_EXPR:
9920 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9922 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9923 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9925 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9926 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9928 do_jump
9929 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9930 fold (build (NE_EXPR, TREE_TYPE (exp),
9931 fold (build1 (REALPART_EXPR,
9932 TREE_TYPE (inner_type),
9933 exp0)),
9934 fold (build1 (REALPART_EXPR,
9935 TREE_TYPE (inner_type),
9936 exp1))),
9937 fold (build (NE_EXPR, TREE_TYPE (exp),
9938 fold (build1 (IMAGPART_EXPR,
9939 TREE_TYPE (inner_type),
9940 exp0)),
9941 fold (build1 (IMAGPART_EXPR,
9942 TREE_TYPE (inner_type),
9943 exp1)))),
9944 if_false_label, if_true_label);
9947 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9948 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9950 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
9951 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9952 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9953 else
9954 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
9955 break;
9958 case LT_EXPR:
9959 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9960 if (GET_MODE_CLASS (mode) == MODE_INT
9961 && ! can_compare_p (LT, mode, ccp_jump))
9962 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9963 else
9964 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
9965 break;
9967 case LE_EXPR:
9968 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9969 if (GET_MODE_CLASS (mode) == MODE_INT
9970 && ! can_compare_p (LE, mode, ccp_jump))
9971 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9972 else
9973 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
9974 break;
9976 case GT_EXPR:
9977 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9978 if (GET_MODE_CLASS (mode) == MODE_INT
9979 && ! can_compare_p (GT, mode, ccp_jump))
9980 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9981 else
9982 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
9983 break;
9985 case GE_EXPR:
9986 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9987 if (GET_MODE_CLASS (mode) == MODE_INT
9988 && ! can_compare_p (GE, mode, ccp_jump))
9989 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9990 else
9991 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
9992 break;
9994 case UNORDERED_EXPR:
9995 case ORDERED_EXPR:
9996 {
9997 enum rtx_code cmp, rcmp;
9998 int do_rev;
10000 if (code == UNORDERED_EXPR)
10001 cmp = UNORDERED, rcmp = ORDERED;
10002 else
10003 cmp = ORDERED, rcmp = UNORDERED;
10004 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10006 do_rev = 0;
10007 if (! can_compare_p (cmp, mode, ccp_jump)
10008 && (can_compare_p (rcmp, mode, ccp_jump)
10009 /* If the target doesn't provide either UNORDERED or ORDERED
10010 comparisons, canonicalize on UNORDERED for the library. */
10011 || rcmp == UNORDERED))
10012 do_rev = 1;
10014 if (! do_rev)
10015 do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
10016 else
10017 do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
10019 break;
10022 enum rtx_code rcode1;
10023 enum tree_code tcode2;
10025 case UNLT_EXPR:
10026 rcode1 = UNLT;
10027 tcode2 = LT_EXPR;
10028 goto unordered_bcc;
10029 case UNLE_EXPR:
10030 rcode1 = UNLE;
10031 tcode2 = LE_EXPR;
10032 goto unordered_bcc;
10033 case UNGT_EXPR:
10034 rcode1 = UNGT;
10035 tcode2 = GT_EXPR;
10036 goto unordered_bcc;
10037 case UNGE_EXPR:
10038 rcode1 = UNGE;
10039 tcode2 = GE_EXPR;
10040 goto unordered_bcc;
10041 case UNEQ_EXPR:
10042 rcode1 = UNEQ;
10043 tcode2 = EQ_EXPR;
10044 goto unordered_bcc;
10046 unordered_bcc:
10047 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10048 if (can_compare_p (rcode1, mode, ccp_jump))
10049 do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
10050 if_true_label);
10051 else
10052 {
10053 tree op0 = save_expr (TREE_OPERAND (exp, 0));
10054 tree op1 = save_expr (TREE_OPERAND (exp, 1));
10055 tree cmp0, cmp1;
10057 /* If the target doesn't support combined unordered
10058 compares, decompose into UNORDERED + comparison. */
10059 cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
10060 cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
10061 exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
10062 do_jump (exp, if_false_label, if_true_label);
10064 break;
10066 case CALL_EXPR:
10067 /* Two cases we need to handle specially here are
10068 __builtin_expect (<test>, 0) and
10069 __builtin_expect (<test>, 1)
10071 We need to do this here, so that <test> is not converted to a SCC
10072 operation on machines that use condition code registers and COMPARE
10073 like the PowerPC, and then the jump is done based on whether the SCC
10074 operation produced a 1 or 0. */
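/* E.g., in

     if (__builtin_expect (x == 0, 0)) f ();

   the test "x == 0" is expanded directly as a conditional jump
   annotated as unlikely, instead of first materializing a 0/1 value
   and then jumping on that.  */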
10076 /* Check for a built-in function. */
10077 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
10079 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
10080 tree arglist = TREE_OPERAND (exp, 1);
10082 if (TREE_CODE (fndecl) == FUNCTION_DECL
10083 && DECL_BUILT_IN (fndecl)
10084 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_EXPECT
10085 && arglist != NULL_TREE
10086 && TREE_CHAIN (arglist) != NULL_TREE)
10088 rtx seq = expand_builtin_expect_jump (exp, if_false_label,
10091 if (seq != NULL_RTX)
10092 {
10093 emit_insn (seq);
10094 return;
10095 }
10098 /* Fall through and generate the normal code.  */
10100 default:
10101 normal:
10102 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10103 #if 0
10104 /* This is not needed any more and causes poor code since it causes
10105 comparisons and tests from non-SI objects to have different code
10106 sequences.  */
10107 /* Copy to register to avoid generating bad insns by cse
10108 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10109 if (!cse_not_expected && GET_CODE (temp) == MEM)
10110 temp = copy_to_reg (temp);
10111 #endif
10112 do_pending_stack_adjust ();
10113 /* Do any postincrements in the expression that was tested.  */
10114 emit_queue ();
10116 if (GET_CODE (temp) == CONST_INT
10117 || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
10118 || GET_CODE (temp) == LABEL_REF)
10120 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
10121 if (target)
10122 emit_jump (target);
10124 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10125 && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
10126 /* Note swapping the labels gives us not-equal. */
10127 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10128 else if (GET_MODE (temp) != VOIDmode)
10129 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
10130 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10131 GET_MODE (temp), NULL_RTX,
10132 if_false_label, if_true_label);
10133 else
10134 abort ();
10137 if (drop_through_label)
10139 /* If do_jump produces code that might be jumped around,
10140 do any stack adjusts from that code, before the place
10141 where control merges in. */
10142 do_pending_stack_adjust ();
10143 emit_label (drop_through_label);
10147 /* Given a comparison expression EXP for values too wide to be compared
10148 with one insn, test the comparison and jump to the appropriate label.
10149 The code of EXP is ignored; we always test GT if SWAP is 0,
10150 and LT if SWAP is 1. */
10152 static void
10153 do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10154 tree exp;
10155 int swap;
10156 rtx if_false_label, if_true_label;
10158 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10159 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10160 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10161 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10163 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
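/* As a C-level sketch (illustrative only, hypothetical hi/lo accessors),
   the loop above emits, for a two-word value with the high word first:

       if (hi (op0) >  hi (op1)) goto if_true_label;   // signedness of MODE
       if (hi (op0) != hi (op1)) goto if_false_label;  // high words decide
       if (lo (op0) >  lo (op1)) goto if_true_label;   // always unsigned
       goto if_false_label;                            // equal is not greater
*/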
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
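/* In C terms, the "or" strategy above tests a two-word OP0 roughly as
   (illustrative sketch, hypothetical word accessor):

       if ((word (op0, 0) | word (op0, 1)) == 0)
	 goto if_true_label;
       goto if_false_label;

   so only one compare-and-branch is needed, plus the IORs.  */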
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared) and set (CC0)
   according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
{
  enum rtx_code ucode;
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp);

#if HAVE_cc0
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
#else
  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
#endif
}
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size,
			 if_false_label, if_true_label)
     rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     rtx if_false_label, if_true_label;
{
  enum rtx_code ucode;
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if (swap_commutative_operands_p (op0, op1))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  ucode = unsignedp ? unsigned_condition (code) : code;
  if ((tem = simplify_relational_operation (ucode, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  rtx op0, op1;
  tree type;
  enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp,
								      1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   if_false_label, if_true_label);
}
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
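/* The set/jump/set fallback mentioned above corresponds to this C-level
   sketch (illustrative only, non-inverted case):

       target = 1;                       // assume the condition holds
       if (op0 <code> op1) goto label;   // it did: keep the 1
       target = 0;                       // it didn't: overwrite
     label:

   with the two constants exchanged when INVERT is set.  */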
static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
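  /* For example, a signed "x < 1" becomes "x <= 0" and a signed "x > -1"
     becomes "x >= 0", so the tests below see a comparison with zero.  */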
  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    /* The remaining unordered comparison codes (ORDERED_EXPR, UNLT_EXPR,
       UNLE_EXPR, UNGT_EXPR, UNGE_EXPR, UNEQ_EXPR) map to their rtx
       counterparts the same way; anything else is a bug.  */
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
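  /* Concretely (illustrative only): for "(x & 0x10) != 0" the code below
     computes "(x >> 4) & 1", and for the EQ sense it appends an XOR of
     the low bit: "((x >> 4) & 1) ^ 1".  */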
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (mode, op0, const1_rtx, subtarget);

      return op0;
    }
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }
  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  /* The code of RESULT may not match CODE if compare_from_rtx
     decided to swap its operands and reverse the original code.

     We know that compare_from_rtx returns either a CONST_INT or
     a new comparison code, so it is safe to just extract the
     code from RESULT.  */
  code = GET_CODE (result);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
/* Stubs in case we haven't got a casesi insn.  */
#ifndef HAVE_casesi
# define HAVE_casesi 0
# define gen_casesi(a, b, c, d, e) (0)
# define CODE_FOR_casesi CODE_FOR_nothing
#endif

/* If the machine does not have a case insn that compares the bounds,
   this means extra overhead for dispatch tables, which raises the
   threshold for using them.  */
#ifndef CASE_VALUES_THRESHOLD
#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
#endif /* CASE_VALUES_THRESHOLD */

unsigned int
case_values_threshold ()
{
  return CASE_VALUES_THRESHOLD;
}
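/* So, by default, a switch statement needs at least four case labels to
   get a dispatch table when the target has a casesi insn that checks the
   bounds itself, and at least five when the bounds check must be emitted
   separately.  */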
/* Attempt to generate a casesi instruction.  Returns 1 if successful,
   0 otherwise (i.e. if there is no casesi instruction).  */
int
try_casesi (index_type, index_expr, minval, range,
	    table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label ATTRIBUTE_UNUSED;
     rtx default_label;
{
  enum machine_mode index_mode = SImode;
  int index_bits = GET_MODE_BITSIZE (index_mode);
  rtx op1, op2, index;
  enum machine_mode op_mode;

  if (! HAVE_casesi)
    return 0;

  /* Convert the index to SImode.  */
  if (GET_MODE_BITSIZE (TYPE_MODE (index_type))
      > GET_MODE_BITSIZE (index_mode))
    {
      enum machine_mode omode = TYPE_MODE (index_type);
      rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);

      /* We must handle the endpoints in the original mode.  */
      index_expr = build (MINUS_EXPR, index_type,
			  index_expr, minval);
      minval = integer_zero_node;
      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
      emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
			       omode, 1, default_label);
      /* Now we can safely truncate.  */
      index = convert_to_mode (index_mode, index, 0);
    }
  else
    {
      if (TYPE_MODE (index_type) != index_mode)
	{
	  index_expr = convert ((*lang_hooks.types.type_for_size)
				(index_bits, 0), index_expr);
	  index_type = TREE_TYPE (index_expr);
	}

      index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
    }
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
      (index, op_mode))
    index = copy_to_mode_reg (op_mode, index);

  op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
  op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
		       op1, TREE_UNSIGNED (TREE_TYPE (minval)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
      (op1, op_mode))
    op1 = copy_to_mode_reg (op_mode, op1);

  op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);

  op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
  op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
		       op2, TREE_UNSIGNED (TREE_TYPE (range)));
  if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
      (op2, op_mode))
    op2 = copy_to_mode_reg (op_mode, op2);

  emit_jump_insn (gen_casesi (index, op1, op2,
			      table_label, default_label));
  return 1;
}
/* Attempt to generate a tablejump instruction; same concept.  */
#ifndef HAVE_tablejump
#define HAVE_tablejump 0
#define gen_tablejump(x, y) (0)
#endif

/* Subroutine of the next function.

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

static void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  rtx temp, vector;

  if (INTVAL (range) > cfun->max_jumptable_ents)
    cfun->max_jumptable_ents = INTVAL (range);
  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
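  /* For instance, with a case range of 5 .. 10, INDEX already holds
     index - 5 and RANGE is 5; the unsigned test "index - 5 > 5" then
     rejects index < 5 as well, because the subtraction wraps around to
     a huge unsigned value.  */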
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
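/* The overall shape of the dispatch emitted above, as a C-level sketch
   (illustrative only):

       if ((unsigned) index > range) goto default_label;
       pc = table[index];   // the CASE_VECTOR_MODE entry loaded into TEMP
       goto *pc;            // the tablejump insn
*/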
int
try_tablejump (index_type, index_expr, minval, range,
	       table_label, default_label)
     tree index_type, index_expr, minval, range;
     rtx table_label, default_label;
{
  rtx index;

  if (! HAVE_tablejump)
    return 0;

  index_expr = fold (build (MINUS_EXPR, index_type,
			    convert (index_type, index_expr),
			    convert (index_type, minval)));
  index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
  emit_queue ();
  index = protect_from_queue (index, 0);
  do_pending_stack_adjust ();

  do_tablejump (index, TYPE_MODE (index_type),
		convert_modes (TYPE_MODE (index_type),
			       TYPE_MODE (TREE_TYPE (range)),
			       expand_expr (range, NULL_RTX,
					    VOIDmode, 0),
			       TREE_UNSIGNED (TREE_TYPE (range))),
		table_label, default_label);
  return 1;
}
/* Nonzero if the mode is a valid vector mode for this architecture.
   This returns nonzero even if there is no hardware support for the
   vector mode, but we can emulate with narrower modes.  */

int
vector_mode_valid_p (mode)
     enum machine_mode mode;
{
  enum mode_class class = GET_MODE_CLASS (mode);
  enum machine_mode innermode;

  /* Doh!  What's going on?  */
  if (class != MODE_VECTOR_INT
      && class != MODE_VECTOR_FLOAT)
    return 0;

  /* Hardware support.  Woo hoo!  */
  if (VECTOR_MODE_SUPPORTED_P (mode))
    return 1;

  innermode = GET_MODE_INNER (mode);

  /* We should probably return 1 if requesting V4DI and we have no DI,
     but do have V2DI, but that case is probably very unlikely.  */

  /* If we have support for the inner mode, we can safely emulate it.
     We may not have V2DI, but we can emulate with a pair of DIs.  */
  return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
}
#include "gt-expr.h"